/drivers/edac/ |
D | pnd2_edac.c |
    95  struct dram_addr *daddr, char *msg);
   905  struct dram_addr *daddr, char *msg)  in apl_pmi2mem() argument
   962  daddr->col = column;  in apl_pmi2mem()
   963  daddr->bank = bank;  in apl_pmi2mem()
   964  daddr->row = row;  in apl_pmi2mem()
   965  daddr->rank = rank;  in apl_pmi2mem()
   966  daddr->dimm = 0;  in apl_pmi2mem()
   975  struct dram_addr *daddr, char *msg)  in dnv_pmi2mem() argument
   978  daddr->rank = dnv_get_bit(pmiaddr, dmap[pmiidx].rs0 + 13, 0);  in dnv_pmi2mem()
   980  daddr->rank |= dnv_get_bit(pmiaddr, dmap[pmiidx].rs1 + 13, 1);  in dnv_pmi2mem()
        [all …]
|
/drivers/infiniband/sw/rxe/ |
D | rxe_net.c |
    25  struct in_addr *daddr)  in rxe_find_route4() argument
    33  memcpy(&fl.daddr, daddr, sizeof(*daddr));  in rxe_find_route4()
    38  pr_err_ratelimited("no route to %pI4\n", &daddr->s_addr);  in rxe_find_route4()
    48  struct in6_addr *daddr)  in rxe_find_route6() argument
    56  memcpy(&fl6.daddr, daddr, sizeof(*daddr));  in rxe_find_route6()
    63  pr_err_ratelimited("no route to %pI6\n", daddr);  in rxe_find_route6()
    68  pr_err("no route to %pI6\n", daddr);  in rxe_find_route6()
    82  struct in6_addr *daddr)  in rxe_find_route6() argument
   104  struct in_addr *daddr;  in rxe_find_route() local
   107  daddr = &av->dgid_addr._sockaddr_in.sin_addr;  in rxe_find_route()
        [all …]
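The rxe_net.c hits reflect the usual kernel pattern of copying the destination address into a flow key before a route lookup. Below is a minimal sketch of that pattern only, not the rxe driver's actual helper; the function name, namespace argument, and error handling are illustrative assumptions.

#include <linux/err.h>
#include <linux/in.h>
#include <linux/printk.h>
#include <linux/string.h>
#include <net/route.h>

/* Hypothetical helper: fill an IPv4 flow key from saddr/daddr and
 * resolve a route, as the rxe_find_route4() hits above suggest. */
static struct rtable *example_find_route4(struct net *net,
					  const struct in_addr *saddr,
					  const struct in_addr *daddr)
{
	struct flowi4 fl4;
	struct rtable *rt;

	memset(&fl4, 0, sizeof(fl4));
	memcpy(&fl4.saddr, saddr, sizeof(*saddr));
	memcpy(&fl4.daddr, daddr, sizeof(*daddr));

	rt = ip_route_output_key(net, &fl4);
	if (IS_ERR(rt)) {
		pr_err_ratelimited("no route to %pI4\n", &daddr->s_addr);
		return NULL;
	}
	return rt;
}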
|
D | rxe_recv.c |
   106  struct in_addr *daddr =  in check_addr() local
   109  if ((ip_hdr(skb)->daddr != saddr->s_addr) ||  in check_addr()
   110  (ip_hdr(skb)->saddr != daddr->s_addr))  in check_addr()
   116  struct in6_addr *daddr =  in check_addr() local
   119  if (memcmp(&ipv6_hdr(skb)->daddr, saddr, sizeof(*saddr)) ||  in check_addr()
   120  memcmp(&ipv6_hdr(skb)->saddr, daddr, sizeof(*daddr)))  in check_addr()
   192  ipv6_addr_set_v4mapped(ip_hdr(skb)->daddr,  in rxe_rcv_mcast_pkt()
   195  memcpy(&dgid, &ipv6_hdr(skb)->daddr, sizeof(dgid));  in rxe_rcv_mcast_pkt()
   287  ipv6_addr_set_v4mapped(ip_hdr(skb)->daddr,  in rxe_chk_dgid()
   291  pdgid = (union ib_gid *)&ipv6_hdr(skb)->daddr;  in rxe_chk_dgid()
|
/drivers/staging/wlan-ng/ |
D | p80211conv.c |
   178  memcpy(p80211_hdr->address1, &e_hdr.daddr, ETH_ALEN);  in skb_ether_to_p80211()
   186  memcpy(p80211_hdr->address3, &e_hdr.daddr, ETH_ALEN);  in skb_ether_to_p80211()
   190  memcpy(p80211_hdr->address1, &e_hdr.daddr, ETH_ALEN);  in skb_ether_to_p80211()
   282  u8 daddr[ETH_ALEN];  in skb_p80211_to_ether() local
   299  ether_addr_copy(daddr, w_hdr->address1);  in skb_p80211_to_ether()
   303  ether_addr_copy(daddr, w_hdr->address1);  in skb_p80211_to_ether()
   307  ether_addr_copy(daddr, w_hdr->address3);  in skb_p80211_to_ether()
   316  ether_addr_copy(daddr, w_hdr->address3);  in skb_p80211_to_ether()
   362  ((!ether_addr_equal_unaligned(daddr, e_hdr->daddr)) ||  in skb_p80211_to_ether()
   409  ether_addr_copy(e_hdr->daddr, daddr);  in skb_p80211_to_ether()
        [all …]
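The skb_p80211_to_ether() hits pick the destination MAC out of different 802.11 address fields depending on the frame's direction. The sketch below shows that selection with mac80211's generic struct ieee80211_hdr; wlan-ng actually uses its own p80211 header layout, so this is illustrative only.

#include <linux/etherdevice.h>
#include <linux/ieee80211.h>

/* Illustrative only: DA is addr1 for frames not headed toward the
 * distribution system, and addr3 for ToDS frames, matching the
 * address1/address3 copies in the hits above. */
static void example_wlan_daddr(const struct ieee80211_hdr *hdr, u8 *daddr)
{
	if (ieee80211_has_tods(hdr->frame_control))
		ether_addr_copy(daddr, hdr->addr3);
	else
		ether_addr_copy(daddr, hdr->addr1);
}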
|
/drivers/net/ethernet/intel/ixgbevf/ |
D | ipsec.c |
    34  memcpy(sam->addr, &xs->id.daddr.a6, sizeof(xs->id.daddr.a6));  in ixgbevf_ipsec_set_pf_sa()
    36  memcpy(sam->addr, &xs->id.daddr.a4, sizeof(xs->id.daddr.a4));  in ixgbevf_ipsec_set_pf_sa()
   179  __be32 *daddr, u8 proto,  in ixgbevf_ipsec_find_rx_state() argument
   189  ((ip4 && *daddr == rsa->xs->id.daddr.a4) ||  in ixgbevf_ipsec_find_rx_state()
   190  (!ip4 && !memcmp(daddr, &rsa->xs->id.daddr.a6,  in ixgbevf_ipsec_find_rx_state()
   191  sizeof(rsa->xs->id.daddr.a6)))) &&  in ixgbevf_ipsec_find_rx_state()
   315  memcpy(rsa.ipaddr, &xs->id.daddr.a6, 16);  in ixgbevf_ipsec_add_sa()
   317  memcpy(&rsa.ipaddr[3], &xs->id.daddr.a4, 4);  in ixgbevf_ipsec_add_sa()
   563  void *daddr;  in ixgbevf_ipsec_rx() local
   575  daddr = &ip4->daddr;  in ixgbevf_ipsec_rx()
        [all …]
|
/drivers/gpu/drm/radeon/ |
D | radeon_benchmark.c |
    36  uint64_t saddr, uint64_t daddr,  in radeon_benchmark_do_move() argument
    49  fence = radeon_copy_dma(rdev, saddr, daddr,  in radeon_benchmark_do_move()
    54  fence = radeon_copy_blit(rdev, saddr, daddr,  in radeon_benchmark_do_move()
    92  uint64_t saddr, daddr;  in radeon_benchmark_move() local
   116  r = radeon_bo_pin(dobj, ddomain, &daddr);  in radeon_benchmark_move()
   123  time = radeon_benchmark_do_move(rdev, size, saddr, daddr,  in radeon_benchmark_move()
   134  time = radeon_benchmark_do_move(rdev, size, saddr, daddr,  in radeon_benchmark_move()
|
/drivers/net/ethernet/chelsio/libcxgb/ |
D | libcxgb_cm.c |
    59  __func__, ntohl(ip->saddr), ntohl(ip->daddr),  in cxgb_get_4tuple()
    63  memcpy(local_ip, &ip->daddr, 4);  in cxgb_get_4tuple()
    66  __func__, ip6->saddr.s6_addr, ip6->daddr.s6_addr,  in cxgb_get_4tuple()
    70  memcpy(local_ip, ip6->daddr.s6_addr, 16);  in cxgb_get_4tuple()
   132  memcpy(&fl6.daddr, peer_ip, 16);  in cxgb_find_route6()
   134  if (ipv6_addr_type(&fl6.daddr) & IPV6_ADDR_LINKLOCAL)  in cxgb_find_route6()
|
/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_benchmark.c |
    32  uint64_t saddr, uint64_t daddr, int n, s64 *time_ms)  in amdgpu_benchmark_do_move() argument
    41  r = amdgpu_copy_buffer(ring, saddr, daddr, size, NULL, &fence,  in amdgpu_benchmark_do_move()
    80  uint64_t saddr, daddr;  in amdgpu_benchmark_move() local
    96  &daddr,  in amdgpu_benchmark_move()
   102  r = amdgpu_benchmark_do_move(adev, size, saddr, daddr, n, &time_ms);  in amdgpu_benchmark_move()
   118  amdgpu_bo_free_kernel(&dobj, &daddr, NULL);  in amdgpu_benchmark_move()
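Both the radeon and amdgpu benchmark hits above time repeated copies between a pinned source address (saddr) and destination address (daddr). The sketch below shows only that timing loop; do_copy() is a stand-in for the driver's DMA or blit entry point, not a real kernel API.

#include <linux/ktime.h>
#include <linux/types.h>

/* Time n copies from saddr to daddr and return the elapsed time in
 * milliseconds, or a negative errno from do_copy() on failure. */
static s64 example_benchmark_move(u64 saddr, u64 daddr, unsigned int size,
				  int n,
				  int (*do_copy)(u64 src, u64 dst,
						 unsigned int size))
{
	ktime_t start = ktime_get();
	int i;

	for (i = 0; i < n; i++) {
		int r = do_copy(saddr, daddr, size);

		if (r)
			return r;
	}

	return ktime_ms_delta(ktime_get(), start);
}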
|
/drivers/net/ethernet/intel/ixgbe/ |
D | ixgbe_ipsec.c |
   393  __be32 *daddr, u8 proto,  in ixgbe_ipsec_find_rx_state() argument
   405  ((ip4 && *daddr == rsa->xs->id.daddr.a4) ||  in ixgbe_ipsec_find_rx_state()
   406  (!ip4 && !memcmp(daddr, &rsa->xs->id.daddr.a6,  in ixgbe_ipsec_find_rx_state()
   407  sizeof(rsa->xs->id.daddr.a6)))) &&  in ixgbe_ipsec_find_rx_state()
   514  if (reg == (__force u32)xs->id.daddr.a4)  in ixgbe_ipsec_check_mgmt_ip()
   521  if (reg == (__force u32)xs->id.daddr.a4)  in ixgbe_ipsec_check_mgmt_ip()
   536  if (reg != (__force u32)xs->id.daddr.a6[j])  in ixgbe_ipsec_check_mgmt_ip()
   546  if (reg != (__force u32)xs->id.daddr.a6[j])  in ixgbe_ipsec_check_mgmt_ip()
   620  memcpy(rsa.ipaddr, &xs->id.daddr.a6, 16);  in ixgbe_ipsec_add_sa()
   622  memcpy(&rsa.ipaddr[3], &xs->id.daddr.a4, 4);  in ixgbe_ipsec_add_sa()
        [all …]
|
/drivers/target/ |
D | target_core_sbc.c |
  1165  void *daddr, *paddr;  in sbc_dif_generate() local
  1171  daddr = kmap_atomic(sg_page(dsg)) + dsg->offset;  in sbc_dif_generate()
  1180  kunmap_atomic(daddr - dsg->offset);  in sbc_dif_generate()
  1186  daddr = kmap_atomic(sg_page(dsg)) + dsg->offset;  in sbc_dif_generate()
  1191  crc = crc_t10dif(daddr + offset, avail);  in sbc_dif_generate()
  1193  kunmap_atomic(daddr - dsg->offset);  in sbc_dif_generate()
  1199  daddr = kmap_atomic(sg_page(dsg)) + dsg->offset;  in sbc_dif_generate()
  1201  crc = crc_t10dif_update(crc, daddr, offset);  in sbc_dif_generate()
  1221  kunmap_atomic(daddr - dsg->offset);  in sbc_dif_generate()
  1320  void *daddr, *paddr;  in sbc_dif_verify() local
        [all …]
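The sbc_dif_generate()/sbc_dif_verify() hits map data pages temporarily and feed them to the T10-DIF CRC. Below is a simplified sketch of that kmap/CRC pattern under assumptions (one scatterlist segment, len not exceeding the segment); the real code walks blocks that can straddle segment boundaries, which is where crc_t10dif_update() comes in.

#include <linux/crc-t10dif.h>
#include <linux/highmem.h>
#include <linux/scatterlist.h>

/* Compute the T10-DIF guard tag over one data segment: map the page,
 * run the CRC at the segment offset, unmap, and return big-endian. */
static __be16 example_dif_guard(struct scatterlist *dsg, unsigned int len)
{
	void *daddr;
	u16 crc;

	daddr = kmap_atomic(sg_page(dsg)) + dsg->offset;
	crc = crc_t10dif(daddr, len);	/* assumes len <= dsg->length */
	kunmap_atomic(daddr - dsg->offset);

	return cpu_to_be16(crc);
}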
|
/drivers/infiniband/ulp/ipoib/ |
D | ipoib_main.c |
   705  static void push_pseudo_header(struct sk_buff *skb, const char *daddr)  in push_pseudo_header() argument
   710  memcpy(phdr->hwaddr, daddr, INFINIBAND_ALEN);  in push_pseudo_header()
   823  if (ipoib_cm_enabled(dev, neigh->daddr)) {  in path_rec_completion()
   930  static void neigh_refresh_path(struct ipoib_neigh *neigh, u8 *daddr,  in neigh_refresh_path() argument
   939  path = __path_find(dev, daddr + 4);  in neigh_refresh_path()
   948  static struct ipoib_neigh *neigh_add_path(struct sk_buff *skb, u8 *daddr,  in neigh_add_path() argument
   958  neigh = ipoib_neigh_alloc(daddr, dev);  in neigh_add_path()
   974  path = __path_find(dev, daddr + 4);  in neigh_add_path()
   976  path = path_rec_create(dev, daddr + 4);  in neigh_add_path()
   989  if (ipoib_cm_enabled(dev, neigh->daddr)) {  in neigh_add_path()
        [all …]
|
/drivers/misc/genwqe/ |
D | card_utils.c |
   251  dma_addr_t daddr;  in genwqe_map_pages() local
   254  daddr = dma_map_page(&pci_dev->dev, page_list[i],  in genwqe_map_pages()
   259  if (dma_mapping_error(&pci_dev->dev, daddr)) {  in genwqe_map_pages()
   262  __func__, (long long)daddr);  in genwqe_map_pages()
   266  dma_list[i] = daddr;  in genwqe_map_pages()
   389  dma_addr_t daddr;  in genwqe_setup_sgl() local
   404  daddr = sgl->fpage_dma_addr + map_offs;  in genwqe_setup_sgl()
   408  daddr = sgl->lpage_dma_addr;  in genwqe_setup_sgl()
   410  daddr = dma_list[p] + map_offs;  in genwqe_setup_sgl()
   416  if (prev_daddr == daddr) {  in genwqe_setup_sgl()
        [all …]
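The genwqe_map_pages() hits follow the standard streaming-DMA pattern: map each page, check for a mapping error, and record the bus address. A minimal sketch of that pattern is below; the function name, error code, and cleanup policy are illustrative, not the genwqe code.

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/mm.h>

/* Map an array of pages for bidirectional DMA and store the resulting
 * bus addresses; unmap everything already mapped on failure. */
static int example_map_pages(struct device *dev, struct page **pages,
			     int num_pages, dma_addr_t *dma_list)
{
	int i;

	for (i = 0; i < num_pages; i++) {
		dma_addr_t daddr = dma_map_page(dev, pages[i], 0, PAGE_SIZE,
						DMA_BIDIRECTIONAL);

		if (dma_mapping_error(dev, daddr)) {
			dev_err(dev, "%s: mapping page %d failed\n",
				__func__, i);
			goto err_unmap;
		}
		dma_list[i] = daddr;
	}
	return 0;

err_unmap:
	while (--i >= 0)
		dma_unmap_page(dev, dma_list[i], PAGE_SIZE,
			       DMA_BIDIRECTIONAL);
	return -EIO;
}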
|
/drivers/staging/media/ipu3/ |
D | ipu3-dmamap.c |
   133  map->daddr = iova_dma_addr(&imgu->iova_domain, iova);  in imgu_dmamap_alloc()
   136  size, &map->daddr, map->vaddr);  in imgu_dmamap_alloc()
   156  iova_pfn(&imgu->iova_domain, map->daddr));  in imgu_dmamap_unmap()
   172  __func__, map->size, &map->daddr, map->vaddr);  in imgu_dmamap_free()
   220  map->daddr = iova_dma_addr(&imgu->iova_domain, iova);  in imgu_dmamap_map_sg()
|
D | ipu3-css.h |
    70  dma_addr_t daddr;  member
   207  unsigned int queue, dma_addr_t daddr)  in imgu_css_buf_init() argument
   211  b->daddr = daddr;  in imgu_css_buf_init()
|
D | ipu3-css.c |
   421  writel(css->binary[css->fw_sp[i]].daddr,  in imgu_css_hw_init()
   427  writel(css->binary[css->fw_bl].daddr, base + IMGU_REG_ISP_ICACHE_ADDR);  in imgu_css_hw_init()
   474  .ddr_data_addr = css->binary[css->fw_sp[sp]].daddr  in imgu_css_hw_start_sp()
   534  .src_addr = css->binary[css->fw_sp[j]].daddr  in imgu_css_hw_start()
   564  writel(css->xmem_sp_group_ptrs.daddr,  in imgu_css_hw_start()
   789  css_pipe->aux_frames[IPU3_CSS_AUX_FRAME_REF].mem[i].daddr;  in imgu_css_pipeline_init()
   791  css_pipe->aux_frames[IPU3_CSS_AUX_FRAME_REF].mem[i].daddr +  in imgu_css_pipeline_init()
   839  .mem[i].daddr;  in imgu_css_pipeline_init()
   890  css_pipe->binary_params_cs[i - 1][j].daddr;  in imgu_css_pipeline_init()
   994  sp_stage->xmem_bin_addr = css->binary[css_pipe->bindex].daddr;  in imgu_css_pipeline_init()
        [all …]
|
/drivers/net/ethernet/mellanox/mlx5/core/en/ |
D | tc_tun.c |
   156  n = dst_neigh_lookup(&rt->dst, &attr->fl.fl4.daddr);  in mlx5e_route_lookup_ipv4_get()
   240  attr.fl.fl4.daddr = tun_key->u.ipv4.dst;  in mlx5e_tc_tun_create_header_ipv4()
   294  ip->daddr = attr.fl.fl4.daddr;  in mlx5e_tc_tun_create_header_ipv4()
   355  attr.fl.fl4.daddr = tun_key->u.ipv4.dst;  in mlx5e_tc_tun_update_header_ipv4()
   398  ip->daddr = attr.fl.fl4.daddr;  in mlx5e_tc_tun_update_header_ipv4()
   469  n = dst_neigh_lookup(dst, &attr->fl.fl6.daddr);  in mlx5e_route_lookup_ipv6_get()
   510  attr.fl.fl6.daddr = tun_key->u.ipv6.dst;  in mlx5e_tc_tun_create_header_ipv6()
   562  ip6h->daddr = attr.fl.fl6.daddr;  in mlx5e_tc_tun_create_header_ipv6()
   624  attr.fl.fl6.daddr = tun_key->u.ipv6.dst;  in mlx5e_tc_tun_update_header_ipv6()
   665  ip6h->daddr = attr.fl.fl6.daddr;  in mlx5e_tc_tun_update_header_ipv6()
        [all …]
|
/drivers/net/ethernet/mellanox/mlxsw/ |
D | spectrum_ipip.c |
    86  return (union mlxsw_sp_l3addr) { .addr4 = parms->iph.daddr };  in mlxsw_sp_ipip_parms4_daddr()
   158  .daddr = mlxsw_sp_ipip_parms4_daddr(&parms),  in mlxsw_sp_ipip_netdev_parms_init_gre4()
   226  union mlxsw_sp_l3addr daddr = mlxsw_sp_ipip_netdev_daddr(proto, ol_dev);  in mlxsw_sp_ipip_tunnel_complete() local
   234  !mlxsw_sp_l3addr_is_zero(daddr);  in mlxsw_sp_ipip_tunnel_complete()
   301  } else if (!mlxsw_sp_l3addr_eq(&new_parms->daddr, &old_parms->daddr)) {  in mlxsw_sp_ipip_ol_netdev_change_gre()
   459  .daddr = mlxsw_sp_ipip_parms6_daddr(&parms),  in mlxsw_sp2_ipip_netdev_parms_init_gre6()
   572  &ipip_entry->parms.daddr.addr6,  in mlxsw_sp2_ipip_rem_addr_set_gre6()
   580  mlxsw_sp_ipv6_addr_put(mlxsw_sp, &ipip_entry->parms.daddr.addr6);  in mlxsw_sp2_ipip_rem_addr_unset_gre6()
|
D | spectrum_span.c |
   356  union mlxsw_sp_l3addr daddr,  in mlxsw_sp_span_entry_tunnel_parms_common() argument
   366  gw = daddr;  in mlxsw_sp_span_entry_tunnel_parms_common()
   402  sparmsp->daddr = daddr;  in mlxsw_sp_span_entry_tunnel_parms_common()
   456  union mlxsw_sp_l3addr daddr = { .addr4 = tparm.iph.daddr };  in mlxsw_sp_span_entry_gretap4_parms() local
   459  union mlxsw_sp_l3addr gw = daddr;  in mlxsw_sp_span_entry_gretap4_parms()
   468  mlxsw_sp_l3addr_is_zero(daddr))  in mlxsw_sp_span_entry_gretap4_parms()
   472  return mlxsw_sp_span_entry_tunnel_parms_common(l3edev, saddr, daddr, gw,  in mlxsw_sp_span_entry_gretap4_parms()
   499  be32_to_cpu(sparms.daddr.addr4));  in mlxsw_sp_span_entry_gretap4_configure()
   535  if (!ip6_tnl_xmit_ctl(t, &fl6.saddr, &fl6.daddr))  in mlxsw_sp_span_gretap6_route()
   561  union mlxsw_sp_l3addr daddr = { .addr6 = tparm.raddr };  in mlxsw_sp_span_entry_gretap6_parms() local
        [all …]
|
/drivers/infiniband/core/ |
D | addr.c |
   147  const void *daddr,  in ib_nl_ip_send_msg() argument
   183  nla_put(skb, attrtype, size, daddr);  in ib_nl_ip_send_msg()
   318  const void *daddr, u32 seq, u16 family)  in ib_nl_fetch_ha() argument
   323  return ib_nl_ip_send_msg(dev_addr, daddr, seq, family);  in ib_nl_fetch_ha()
   328  const void *daddr)  in dst_fetch_ha() argument
   333  n = dst_neigh_lookup(dst, daddr);  in dst_fetch_ha()
   370  const void *daddr = (dst_in->sa_family == AF_INET) ?  in fetch_ha() local
   379  return ib_nl_fetch_ha(dev_addr, daddr, seq, family);  in fetch_ha()
   381  return dst_fetch_ha(dst, dev_addr, daddr);  in fetch_ha()
   400  fl4.daddr = dst_ip;  in addr4_resolve()
        [all …]
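The dst_fetch_ha() hits show the neighbour-table side of address resolution: look up the neighbour entry for daddr on the route's dst and read its hardware address. Below is a rough sketch of that lookup only; copying the address into a caller buffer via neigh_ha_snapshot() is an illustrative choice, and the real code also kicks off resolution when the entry is not yet valid.

#include <linux/errno.h>
#include <net/dst.h>
#include <net/neighbour.h>

/* Resolve daddr to a link-layer address through the route's neighbour
 * entry; returns 0 on success, -ENODATA if no valid entry exists. */
static int example_fetch_ha(struct dst_entry *dst, const void *daddr,
			    char *ha)
{
	struct neighbour *n;
	int ret = -ENODATA;

	n = dst_neigh_lookup(dst, daddr);
	if (!n)
		return -ENODATA;

	if (n->nud_state & NUD_VALID) {
		neigh_ha_snapshot(ha, n, n->dev);
		ret = 0;
	}
	neigh_release(n);

	return ret;
}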
|
/drivers/infiniband/hw/usnic/ |
D | usnic_fwd.h |
   112  uint32_t daddr, uint16_t dport)  in usnic_fwd_init_udp_filter() argument
   118  if (daddr) {  in usnic_fwd_init_udp_filter()
   120  filter->u.ipv4.dst_addr = daddr;  in usnic_fwd_init_udp_filter()
|
/drivers/gpu/drm/ |
D | drm_fb_dma_helper.c |
   129  dma_addr_t daddr;  in drm_fb_dma_sync_non_coherent() local
   137  daddr = drm_fb_dma_get_gem_addr(state->fb, state, i);  in drm_fb_dma_sync_non_coherent()
   145  dma_sync_single_for_device(drm->dev, daddr + offset,  in drm_fb_dma_sync_non_coherent()
|
/drivers/net/ |
D | amt.c |
   548  iph->daddr = htonl(INADDR_ALLHOSTS_GROUP);  in amt_build_igmp_gq()
   683  iph->daddr = amt->discovery_ip;  in amt_send_discovery()
   774  iph->daddr = amt->remote_ip;  in amt_send_request()
   837  ip6h->daddr = mld2_all_node;  in amt_build_mld_gq()
   840  if (ipv6_dev_get_saddr(amt->net, amt->dev, &ip6h->daddr, 0,  in amt_build_mld_gq()
   865  mld2q->mld2q_cksum = csum_ipv6_magic(&ip6h->saddr, &ip6h->daddr,  in amt_build_mld_gq()
  1019  fl4.daddr = amt->remote_ip;  in amt_send_membership_update()
  1042  fl4.daddr,  in amt_send_membership_update()
  1078  fl4.daddr = tunnel->ip4;  in amt_send_multicast_data()
  1099  fl4.daddr,  in amt_send_multicast_data()
        [all …]
|
/drivers/video/fbdev/ |
D | hitfb.c |
    78  u32 saddr, daddr;  in hitfb_accel_bitblt() local
    86  daddr = WIDTH * (dy + height) + dx + width;  in hitfb_accel_bitblt()
    99  daddr = WIDTH * dy + dx;  in hitfb_accel_bitblt()
   108  daddr <<= 1;  in hitfb_accel_bitblt()
   114  fb_writew(daddr & 0xffff, HD64461_BBTDSARL);  in hitfb_accel_bitblt()
   115  fb_writew(daddr >> 16, HD64461_BBTDSARH);  in hitfb_accel_bitblt()
|
/drivers/net/wireguard/ |
D | socket.c |
    25  .daddr = endpoint->addr4.sin_addr.s_addr,  in send4()
    85  udp_tunnel_xmit_skb(rt, sock, skb, fl.saddr, fl.daddr, ds,  in send4()
   103  .daddr = endpoint->addr6.sin6_addr,  in send6()
   152  udp_tunnel6_xmit_skb(dst, sock, skb, skb->dev, &fl.saddr, &fl.daddr, ds,  in send6()
   243  endpoint->src4.s_addr = ip_hdr(skb)->daddr;  in wg_socket_endpoint_from_skb()
   251  endpoint->src6 = ipv6_hdr(skb)->daddr;  in wg_socket_endpoint_from_skb()
|
/drivers/net/arcnet/ |
D | arcnet.c |
    65  unsigned short type, uint8_t daddr);
   105  unsigned short type, const void *daddr,
   646  unsigned short type, const void *daddr,  in arcnet_header() argument
   656  daddr ? *(uint8_t *)daddr : -1,  in arcnet_header()
   668  _daddr = daddr ? *(uint8_t *)daddr : 0;  in arcnet_header()
   669  } else if (!daddr) {  in arcnet_header()
   683  _daddr = *(uint8_t *)daddr;  in arcnet_header()
  1189  unsigned short type, uint8_t daddr)  in null_build_header() argument
  1195  lp->default_proto[daddr]);  in null_build_header()
|