/drivers/edac/
  D  pnd2_edac.c
      93  struct dram_addr *daddr, char *msg);
      927  struct dram_addr *daddr, char *msg)  in apl_pmi2mem() argument
      984  daddr->col = column;  in apl_pmi2mem()
      985  daddr->bank = bank;  in apl_pmi2mem()
      986  daddr->row = row;  in apl_pmi2mem()
      987  daddr->rank = rank;  in apl_pmi2mem()
      988  daddr->dimm = 0;  in apl_pmi2mem()
      997  struct dram_addr *daddr, char *msg)  in dnv_pmi2mem() argument
      1000  daddr->rank = dnv_get_bit(pmiaddr, dmap[pmiidx].rs0 + 13, 0);  in dnv_pmi2mem()
      1002  daddr->rank |= dnv_get_bit(pmiaddr, dmap[pmiidx].rs1 + 13, 1);  in dnv_pmi2mem()
      [all …]

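Note: the pnd2_edac.c hits above decode a pseudo-memory-interface (PMI) address into DRAM coordinates (column, bank, row, rank) one bit at a time. A minimal userspace sketch of that bit-plucking pattern; the get_bit() helper is illustrative and only stands in for what dnv_get_bit() is used for here, not its actual implementation.

    #include <stdint.h>
    #include <stdio.h>

    /* Take bit `bit` of `addr` and place it at position `pos` of the result. */
    static uint32_t get_bit(uint64_t addr, int bit, int pos)
    {
        return (uint32_t)((addr >> bit) & 1) << pos;
    }

    int main(void)
    {
        uint64_t pmiaddr = 0x6000;              /* bits 13 and 14 set */
        uint32_t rank;

        rank  = get_bit(pmiaddr, 13, 0);        /* rank-select bit taken from addr bit 13 */
        rank |= get_bit(pmiaddr, 14, 1);        /* rank-select bit taken from addr bit 14 */
        printf("rank = %u\n", (unsigned int)rank);   /* prints 3 */
        return 0;
    }
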
/drivers/infiniband/sw/rxe/
  D  rxe_net.c
      47  struct in_addr *daddr)  in rxe_find_route4() argument
      55  memcpy(&fl.daddr, daddr, sizeof(*daddr));  in rxe_find_route4()
      60  pr_err_ratelimited("no route to %pI4\n", &daddr->s_addr);  in rxe_find_route4()
      70  struct in6_addr *daddr)  in rxe_find_route6() argument
      78  memcpy(&fl6.daddr, daddr, sizeof(*daddr));  in rxe_find_route6()
      85  pr_err_ratelimited("no route to %pI6\n", daddr);  in rxe_find_route6()
      90  pr_err("no route to %pI6\n", daddr);  in rxe_find_route6()
      104  struct in6_addr *daddr)  in rxe_find_route6() argument
      126  struct in_addr *daddr;  in rxe_find_route() local
      129  daddr = &av->dgid_addr._sockaddr_in.sin_addr;  in rxe_find_route()
      [all …]

  D  rxe_recv.c
      128  struct in_addr *daddr =  in check_addr() local
      131  if (ip_hdr(skb)->daddr != saddr->s_addr) {  in check_addr()
      133  &ip_hdr(skb)->daddr,  in check_addr()
      138  if (ip_hdr(skb)->saddr != daddr->s_addr) {  in check_addr()
      141  &daddr->s_addr);  in check_addr()
      148  struct in6_addr *daddr =  in check_addr() local
      151  if (memcmp(&ipv6_hdr(skb)->daddr, saddr, sizeof(*saddr))) {  in check_addr()
      153  &ipv6_hdr(skb)->daddr, saddr);  in check_addr()
      157  if (memcmp(&ipv6_hdr(skb)->saddr, daddr, sizeof(*daddr))) {  in check_addr()
      159  &ipv6_hdr(skb)->saddr, daddr);  in check_addr()
      [all …]

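Note: rxe_find_route() pulls the destination out of the address vector's sockaddr storage and dispatches to a v4 or v6 lookup, while check_addr() compares a received packet's IP header against the expected saddr/daddr pair. A simplified userspace sketch of the family dispatch; find_route4()/find_route6() below are stand-ins that only print, not the kernel helpers.

    #include <arpa/inet.h>
    #include <stdio.h>

    /* Stand-ins for the per-family lookups; they only print the destination. */
    static void find_route4(const struct in_addr *daddr)
    {
        char buf[INET_ADDRSTRLEN];
        printf("v4 route lookup for %s\n",
               inet_ntop(AF_INET, daddr, buf, sizeof(buf)));
    }

    static void find_route6(const struct in6_addr *daddr)
    {
        char buf[INET6_ADDRSTRLEN];
        printf("v6 route lookup for %s\n",
               inet_ntop(AF_INET6, daddr, buf, sizeof(buf)));
    }

    int main(void)
    {
        /* Destination kept as a union of sockaddrs, as in the rxe address vector. */
        union {
            struct sockaddr_in  in4;
            struct sockaddr_in6 in6;
        } dgid_addr = { 0 };

        dgid_addr.in4.sin_family = AF_INET;
        inet_pton(AF_INET, "192.0.2.1", &dgid_addr.in4.sin_addr);

        if (dgid_addr.in4.sin_family == AF_INET)
            find_route4(&dgid_addr.in4.sin_addr);   /* daddr = &...sin_addr */
        else
            find_route6(&dgid_addr.in6.sin6_addr);
        return 0;
    }
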
/drivers/staging/wlan-ng/
  D  p80211conv.c
      178  memcpy(p80211_hdr->a3.a1, &e_hdr.daddr, ETH_ALEN);  in skb_ether_to_p80211()
      186  memcpy(p80211_hdr->a3.a3, &e_hdr.daddr, ETH_ALEN);  in skb_ether_to_p80211()
      190  memcpy(p80211_hdr->a3.a1, &e_hdr.daddr, ETH_ALEN);  in skb_ether_to_p80211()
      282  u8 daddr[ETH_ALEN];  in skb_p80211_to_ether() local
      299  ether_addr_copy(daddr, w_hdr->a3.a1);  in skb_p80211_to_ether()
      303  ether_addr_copy(daddr, w_hdr->a3.a1);  in skb_p80211_to_ether()
      307  ether_addr_copy(daddr, w_hdr->a3.a3);  in skb_p80211_to_ether()
      316  ether_addr_copy(daddr, w_hdr->a4.a3);  in skb_p80211_to_ether()
      362  ((!ether_addr_equal_unaligned(daddr, e_hdr->daddr)) ||  in skb_p80211_to_ether()
      409  ether_addr_copy(e_hdr->daddr, daddr);  in skb_p80211_to_ether()
      [all …]

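Note: skb_ether_to_p80211() and skb_p80211_to_ether() move the Ethernet destination into or out of whichever 802.11 address slot the ToDS/FromDS mode calls for. A rough userspace sketch of that mapping; the struct, enum, and helper names are invented for illustration.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define ETH_ALEN 6

    struct a3_hdr { uint8_t a1[ETH_ALEN], a2[ETH_ALEN], a3[ETH_ALEN]; };

    enum wlan_mode { MODE_IBSS, MODE_ESS_STA, MODE_ESS_AP };

    /* Where the Ethernet DA goes: a1 for IBSS and AP->STA frames, a3 for STA->AP. */
    static void place_daddr(struct a3_hdr *hdr, enum wlan_mode mode,
                            const uint8_t daddr[ETH_ALEN])
    {
        if (mode == MODE_ESS_STA)
            memcpy(hdr->a3, daddr, ETH_ALEN);
        else
            memcpy(hdr->a1, daddr, ETH_ALEN);
    }

    int main(void)
    {
        const uint8_t da[ETH_ALEN] = { 0x00, 0x11, 0x22, 0x33, 0x44, 0x55 };
        struct a3_hdr hdr = { 0 };

        place_daddr(&hdr, MODE_ESS_STA, da);
        printf("a3[0]=%02x a1[0]=%02x\n", hdr.a3[0], hdr.a1[0]);
        return 0;
    }
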
/drivers/net/ethernet/intel/ixgbevf/
  D  ipsec.c
      34  memcpy(sam->addr, &xs->id.daddr.a6, sizeof(xs->id.daddr.a6));  in ixgbevf_ipsec_set_pf_sa()
      36  memcpy(sam->addr, &xs->id.daddr.a4, sizeof(xs->id.daddr.a4));  in ixgbevf_ipsec_set_pf_sa()
      179  __be32 *daddr, u8 proto,  in ixgbevf_ipsec_find_rx_state() argument
      189  ((ip4 && *daddr == rsa->xs->id.daddr.a4) ||  in ixgbevf_ipsec_find_rx_state()
      190  (!ip4 && !memcmp(daddr, &rsa->xs->id.daddr.a6,  in ixgbevf_ipsec_find_rx_state()
      191  sizeof(rsa->xs->id.daddr.a6)))) &&  in ixgbevf_ipsec_find_rx_state()
      315  memcpy(rsa.ipaddr, &xs->id.daddr.a6, 16);  in ixgbevf_ipsec_add_sa()
      317  memcpy(&rsa.ipaddr[3], &xs->id.daddr.a4, 4);  in ixgbevf_ipsec_add_sa()
      563  void *daddr;  in ixgbevf_ipsec_rx() local
      575  daddr = &ip4->daddr;  in ixgbevf_ipsec_rx()
      [all …]

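Note: ixgbevf_ipsec_find_rx_state() matches a received packet against a stored SA by destination address: a single 32-bit compare for IPv4, a 16-byte memcmp for IPv6. A self-contained userspace sketch of that comparison; struct rx_sa and sa_matches_daddr() are stand-ins, not the driver's types.

    #include <arpa/inet.h>
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Stand-in for the stored Rx SA: one flag picks which address field is valid. */
    struct rx_sa {
        bool     ip4;
        uint32_t a4;         /* IPv4 daddr, network byte order */
        uint8_t  a6[16];     /* IPv6 daddr */
    };

    static bool sa_matches_daddr(const struct rx_sa *rsa, bool ip4, const void *daddr)
    {
        if (ip4 != rsa->ip4)
            return false;
        if (ip4) {
            uint32_t d;

            memcpy(&d, daddr, sizeof(d));   /* avoid an unaligned dereference */
            return d == rsa->a4;
        }
        return memcmp(daddr, rsa->a6, sizeof(rsa->a6)) == 0;
    }

    int main(void)
    {
        struct rx_sa rsa = { .ip4 = true };
        uint32_t pkt_daddr;

        inet_pton(AF_INET, "198.51.100.7", &rsa.a4);
        inet_pton(AF_INET, "198.51.100.7", &pkt_daddr);
        printf("match: %d\n", sa_matches_daddr(&rsa, true, &pkt_daddr));
        return 0;
    }
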
/drivers/net/ethernet/mellanox/mlx5/core/accel/
  D  ipsec.c
      111  __be32 saddr[4] = {}, daddr[4] = {};  in mlx5_accel_esp_create_hw_context() local
      118  daddr[3] = xfrm->attrs.daddr.a4;  in mlx5_accel_esp_create_hw_context()
      121  memcpy(daddr, xfrm->attrs.daddr.a6, sizeof(daddr));  in mlx5_accel_esp_create_hw_context()
      124  return ipsec_ops->create_hw_context(mdev, xfrm, saddr, daddr, xfrm->attrs.spi,  in mlx5_accel_esp_create_hw_context()

/drivers/gpu/drm/radeon/
  D  radeon_benchmark.c
      36  uint64_t saddr, uint64_t daddr,  in radeon_benchmark_do_move() argument
      49  fence = radeon_copy_dma(rdev, saddr, daddr,  in radeon_benchmark_do_move()
      54  fence = radeon_copy_blit(rdev, saddr, daddr,  in radeon_benchmark_do_move()
      92  uint64_t saddr, daddr;  in radeon_benchmark_move() local
      116  r = radeon_bo_pin(dobj, ddomain, &daddr);  in radeon_benchmark_move()
      123  time = radeon_benchmark_do_move(rdev, size, saddr, daddr,  in radeon_benchmark_move()
      134  time = radeon_benchmark_do_move(rdev, size, saddr, daddr,  in radeon_benchmark_move()

/drivers/net/ethernet/chelsio/libcxgb/
  D  libcxgb_cm.c
      59  __func__, ntohl(ip->saddr), ntohl(ip->daddr),  in cxgb_get_4tuple()
      63  memcpy(local_ip, &ip->daddr, 4);  in cxgb_get_4tuple()
      66  __func__, ip6->saddr.s6_addr, ip6->daddr.s6_addr,  in cxgb_get_4tuple()
      70  memcpy(local_ip, ip6->daddr.s6_addr, 16);  in cxgb_get_4tuple()
      132  memcpy(&fl6.daddr, peer_ip, 16);  in cxgb_find_route6()
      134  if (ipv6_addr_type(&fl6.daddr) & IPV6_ADDR_LINKLOCAL)  in cxgb_find_route6()

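Note: cxgb_get_4tuple() copies the local and peer addresses straight out of the IPv4 or IPv6 header (daddr is the local side of a received packet). A userspace sketch of the IPv4 case using glibc's struct iphdr; the hand-built header and helper name are for illustration only.

    #include <arpa/inet.h>
    #include <netinet/ip.h>      /* struct iphdr (Linux/glibc) */
    #include <stdio.h>
    #include <string.h>

    /* For a received packet, daddr is the local address and saddr the peer. */
    static void get_4tuple_v4(const struct iphdr *ip,
                              struct in_addr *local_ip, struct in_addr *peer_ip)
    {
        memcpy(local_ip, &ip->daddr, 4);
        memcpy(peer_ip, &ip->saddr, 4);
    }

    int main(void)
    {
        struct iphdr ip = { .version = 4, .ihl = 5 };
        struct in_addr local_ip, peer_ip;
        char l[INET_ADDRSTRLEN], p[INET_ADDRSTRLEN];

        inet_pton(AF_INET, "203.0.113.9", &ip.daddr);
        inet_pton(AF_INET, "198.51.100.2", &ip.saddr);

        get_4tuple_v4(&ip, &local_ip, &peer_ip);
        printf("local %s peer %s\n",
               inet_ntop(AF_INET, &local_ip, l, sizeof(l)),
               inet_ntop(AF_INET, &peer_ip, p, sizeof(p)));
        return 0;
    }
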
/drivers/net/ethernet/intel/ixgbe/
  D  ixgbe_ipsec.c
      393  __be32 *daddr, u8 proto,  in ixgbe_ipsec_find_rx_state() argument
      405  ((ip4 && *daddr == rsa->xs->id.daddr.a4) ||  in ixgbe_ipsec_find_rx_state()
      406  (!ip4 && !memcmp(daddr, &rsa->xs->id.daddr.a6,  in ixgbe_ipsec_find_rx_state()
      407  sizeof(rsa->xs->id.daddr.a6)))) &&  in ixgbe_ipsec_find_rx_state()
      514  if (reg == xs->id.daddr.a4)  in ixgbe_ipsec_check_mgmt_ip()
      521  if (reg == xs->id.daddr.a4)  in ixgbe_ipsec_check_mgmt_ip()
      536  if (reg != xs->id.daddr.a6[j])  in ixgbe_ipsec_check_mgmt_ip()
      546  if (reg != xs->id.daddr.a6[j])  in ixgbe_ipsec_check_mgmt_ip()
      620  memcpy(rsa.ipaddr, &xs->id.daddr.a6, 16);  in ixgbe_ipsec_add_sa()
      622  memcpy(&rsa.ipaddr[3], &xs->id.daddr.a4, 4);  in ixgbe_ipsec_add_sa()
      [all …]

/drivers/gpu/drm/amd/amdgpu/
  D  amdgpu_benchmark.c
      32  uint64_t saddr, uint64_t daddr, int n)  in amdgpu_benchmark_do_move() argument
      42  r = amdgpu_copy_buffer(ring, saddr, daddr, size, NULL, &fence,  in amdgpu_benchmark_do_move()
      77  uint64_t saddr, daddr;  in amdgpu_benchmark_move() local
      125  daddr = amdgpu_bo_gpu_offset(dobj);  in amdgpu_benchmark_move()
      128  time = amdgpu_benchmark_do_move(adev, size, saddr, daddr, n);  in amdgpu_benchmark_move()

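Note: both the radeon and amdgpu benchmark helpers time n copies from saddr to daddr and turn the elapsed time into a throughput figure. A userspace sketch of the same bookkeeping with memcpy standing in for the GPU copy engine; the buffer size and iteration count are arbitrary.

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>
    #include <time.h>

    /* Copy `size` bytes from src to dst `n` times and report MB/s. */
    static double benchmark_do_move(void *dst, const void *src, size_t size, int n)
    {
        struct timespec t0, t1;
        double secs;

        clock_gettime(CLOCK_MONOTONIC, &t0);
        for (int i = 0; i < n; i++)
            memcpy(dst, src, size);
        clock_gettime(CLOCK_MONOTONIC, &t1);

        secs = (t1.tv_sec - t0.tv_sec) + (t1.tv_nsec - t0.tv_nsec) / 1e9;
        return (double)size * n / (1024.0 * 1024.0) / secs;
    }

    int main(void)
    {
        const size_t size = 8 << 20;        /* 8 MiB stand-in buffer */
        void *saddr = malloc(size), *daddr = malloc(size);

        if (!saddr || !daddr)
            return 1;
        memset(saddr, 0xa5, size);
        printf("%.1f MB/s\n", benchmark_do_move(daddr, saddr, size, 16));
        free(saddr);
        free(daddr);
        return 0;
    }
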
/drivers/target/
  D  target_core_sbc.c
      1227  void *daddr, *paddr;  in sbc_dif_generate() local
      1233  daddr = kmap_atomic(sg_page(dsg)) + dsg->offset;  in sbc_dif_generate()
      1242  kunmap_atomic(daddr - dsg->offset);  in sbc_dif_generate()
      1248  daddr = kmap_atomic(sg_page(dsg)) + dsg->offset;  in sbc_dif_generate()
      1253  crc = crc_t10dif(daddr + offset, avail);  in sbc_dif_generate()
      1255  kunmap_atomic(daddr - dsg->offset);  in sbc_dif_generate()
      1261  daddr = kmap_atomic(sg_page(dsg)) + dsg->offset;  in sbc_dif_generate()
      1263  crc = crc_t10dif_update(crc, daddr, offset);  in sbc_dif_generate()
      1283  kunmap_atomic(daddr - dsg->offset);  in sbc_dif_generate()
      1382  void *daddr, *paddr;  in sbc_dif_verify() local
      [all …]

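Note: sbc_dif_generate() computes the T10-DIF guard tag across scatter-gather fragments, seeding the CRC on the first chunk and folding later chunks in with crc_t10dif_update(). The sketch below uses a plain bitwise CRC16 with the T10-DIF polynomial (0x8BB7) to show that chunked updates give the same result as one pass over the whole block; the kernel's table-driven implementation differs.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Bitwise CRC16, polynomial 0x8BB7 (T10-DIF), init 0, no reflection. */
    static uint16_t crc16_t10dif_update(uint16_t crc, const uint8_t *buf, size_t len)
    {
        for (size_t i = 0; i < len; i++) {
            crc ^= (uint16_t)buf[i] << 8;
            for (int bit = 0; bit < 8; bit++)
                crc = (crc & 0x8000) ? (uint16_t)((crc << 1) ^ 0x8BB7)
                                     : (uint16_t)(crc << 1);
        }
        return crc;
    }

    int main(void)
    {
        uint8_t block[512];
        uint16_t whole, chunked;

        memset(block, 0x5a, sizeof(block));

        whole = crc16_t10dif_update(0, block, sizeof(block));

        /* Same data presented as two fragments, as with scatter-gather pages. */
        chunked = crc16_t10dif_update(0, block, 200);
        chunked = crc16_t10dif_update(chunked, block + 200, sizeof(block) - 200);

        printf("whole=0x%04x chunked=0x%04x\n", whole, chunked);
        return 0;
    }
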
/drivers/infiniband/ulp/ipoib/
  D  ipoib_main.c
      701  static void push_pseudo_header(struct sk_buff *skb, const char *daddr)  in push_pseudo_header() argument
      706  memcpy(phdr->hwaddr, daddr, INFINIBAND_ALEN);  in push_pseudo_header()
      819  if (ipoib_cm_enabled(dev, neigh->daddr)) {  in path_rec_completion()
      926  static void neigh_refresh_path(struct ipoib_neigh *neigh, u8 *daddr,  in neigh_refresh_path() argument
      935  path = __path_find(dev, daddr + 4);  in neigh_refresh_path()
      944  static struct ipoib_neigh *neigh_add_path(struct sk_buff *skb, u8 *daddr,  in neigh_add_path() argument
      954  neigh = ipoib_neigh_alloc(daddr, dev);  in neigh_add_path()
      970  path = __path_find(dev, daddr + 4);  in neigh_add_path()
      972  path = path_rec_create(dev, daddr + 4);  in neigh_add_path()
      985  if (ipoib_cm_enabled(dev, neigh->daddr)) {  in neigh_add_path()
      [all …]

/drivers/misc/genwqe/
  D  card_utils.c
      251  dma_addr_t daddr;  in genwqe_map_pages() local
      254  daddr = pci_map_page(pci_dev, page_list[i],  in genwqe_map_pages()
      259  if (pci_dma_mapping_error(pci_dev, daddr)) {  in genwqe_map_pages()
      262  __func__, (long long)daddr);  in genwqe_map_pages()
      266  dma_list[i] = daddr;  in genwqe_map_pages()
      389  dma_addr_t daddr;  in genwqe_setup_sgl() local
      404  daddr = sgl->fpage_dma_addr + map_offs;  in genwqe_setup_sgl()
      408  daddr = sgl->lpage_dma_addr;  in genwqe_setup_sgl()
      410  daddr = dma_list[p] + map_offs;  in genwqe_setup_sgl()
      416  if (prev_daddr == daddr) {  in genwqe_setup_sgl()
      [all …]

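Note: genwqe_setup_sgl() walks the per-page DMA addresses and grows the current scatter-gather entry whenever the next chunk starts where the previous one ended (the prev_daddr == daddr test). A userspace sketch of that coalescing, with uint64_t standing in for dma_addr_t and an invented sg_entry type.

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SIZE 4096u

    struct sg_entry {
        uint64_t addr;
        uint64_t len;
    };

    /* Merge page-sized chunks into extents when they are physically contiguous. */
    static unsigned int coalesce(const uint64_t *dma_list, unsigned int npages,
                                 struct sg_entry *sgl)
    {
        unsigned int nents = 0;

        for (unsigned int i = 0; i < npages; i++) {
            uint64_t daddr = dma_list[i];

            if (nents && sgl[nents - 1].addr + sgl[nents - 1].len == daddr) {
                sgl[nents - 1].len += PAGE_SIZE;    /* extends the previous entry */
            } else {
                sgl[nents].addr = daddr;
                sgl[nents].len = PAGE_SIZE;
                nents++;
            }
        }
        return nents;
    }

    int main(void)
    {
        /* Three contiguous pages followed by one discontiguous page. */
        uint64_t dma_list[] = { 0x10000, 0x11000, 0x12000, 0x40000 };
        struct sg_entry sgl[4];
        unsigned int n = coalesce(dma_list, 4, sgl);

        for (unsigned int i = 0; i < n; i++)
            printf("entry %u: addr=0x%llx len=%llu\n", i,
                   (unsigned long long)sgl[i].addr,
                   (unsigned long long)sgl[i].len);
        return 0;
    }
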
/drivers/staging/media/ipu3/
  D  ipu3-dmamap.c
      133  map->daddr = iova_dma_addr(&imgu->iova_domain, iova);  in imgu_dmamap_alloc()
      136  size, &map->daddr, map->vaddr);  in imgu_dmamap_alloc()
      156  iova_pfn(&imgu->iova_domain, map->daddr));  in imgu_dmamap_unmap()
      172  __func__, map->size, &map->daddr, map->vaddr);  in imgu_dmamap_free()
      220  map->daddr = iova_dma_addr(&imgu->iova_domain, iova);  in imgu_dmamap_map_sg()

  D  ipu3-css.h
      70  dma_addr_t daddr;  member
      208  unsigned int queue, dma_addr_t daddr)  in imgu_css_buf_init() argument
      212  b->daddr = daddr;  in imgu_css_buf_init()

  D  ipu3-css.c
      434  writel(css->binary[css->fw_sp[i]].daddr,  in imgu_css_hw_init()
      440  writel(css->binary[css->fw_bl].daddr, base + IMGU_REG_ISP_ICACHE_ADDR);  in imgu_css_hw_init()
      487  .ddr_data_addr = css->binary[css->fw_sp[sp]].daddr  in imgu_css_hw_start_sp()
      547  .src_addr = css->binary[css->fw_sp[j]].daddr  in imgu_css_hw_start()
      577  writel(css->xmem_sp_group_ptrs.daddr,  in imgu_css_hw_start()
      802  css_pipe->aux_frames[IPU3_CSS_AUX_FRAME_REF].mem[i].daddr;  in imgu_css_pipeline_init()
      804  css_pipe->aux_frames[IPU3_CSS_AUX_FRAME_REF].mem[i].daddr +  in imgu_css_pipeline_init()
      852  .mem[i].daddr;  in imgu_css_pipeline_init()
      903  css_pipe->binary_params_cs[i - 1][j].daddr;  in imgu_css_pipeline_init()
      1007  sp_stage->xmem_bin_addr = css->binary[css_pipe->bindex].daddr;  in imgu_css_pipeline_init()
      [all …]

/drivers/infiniband/core/
  D  addr.c
      148  const void *daddr,  in ib_nl_ip_send_msg() argument
      184  nla_put(skb, attrtype, size, daddr);  in ib_nl_ip_send_msg()
      319  const void *daddr, u32 seq, u16 family)  in ib_nl_fetch_ha() argument
      324  return ib_nl_ip_send_msg(dev_addr, daddr, seq, family);  in ib_nl_fetch_ha()
      329  const void *daddr)  in dst_fetch_ha() argument
      334  n = dst_neigh_lookup(dst, daddr);  in dst_fetch_ha()
      371  const void *daddr = (dst_in->sa_family == AF_INET) ?  in fetch_ha() local
      380  return ib_nl_fetch_ha(dev_addr, daddr, seq, family);  in fetch_ha()
      382  return dst_fetch_ha(dst, dev_addr, daddr);  in fetch_ha()
      401  fl4.daddr = dst_ip;  in addr4_resolve()
      [all …]

/drivers/infiniband/hw/usnic/
  D  usnic_fwd.h
      112  uint32_t daddr, uint16_t dport)  in usnic_fwd_init_udp_filter() argument
      118  if (daddr) {  in usnic_fwd_init_udp_filter()
      120  filter->u.ipv4.dst_addr = daddr;  in usnic_fwd_init_udp_filter()

/drivers/video/fbdev/
  D  hitfb.c
      78  u32 saddr, daddr;  in hitfb_accel_bitblt() local
      86  daddr = WIDTH * (dy + height) + dx + width;  in hitfb_accel_bitblt()
      99  daddr = WIDTH * dy + dx;  in hitfb_accel_bitblt()
      108  daddr <<= 1;  in hitfb_accel_bitblt()
      114  fb_writew(daddr & 0xffff, HD64461_BBTDSARL);  in hitfb_accel_bitblt()
      115  fb_writew(daddr >> 16, HD64461_BBTDSARH);  in hitfb_accel_bitblt()

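Note: hitfb_accel_bitblt() derives the blit destination address from the framebuffer pitch: WIDTH * dy + dx for a forward copy, or WIDTH * (dy + height) + dx + width when the copy must run backwards because source and destination overlap, then shifts left by one at 16 bpp. A small sketch of that address arithmetic; WIDTH here is an assumed pitch in pixels.

    #include <stdint.h>
    #include <stdio.h>

    #define WIDTH 640u   /* assumed framebuffer pitch in pixels */

    /* Destination address for a blit: top-left for forward copies,
     * bottom-right when source and destination overlap (backward copy). */
    static uint32_t blit_daddr(uint32_t dx, uint32_t dy, uint32_t width,
                               uint32_t height, int backward, int bpp16)
    {
        uint32_t daddr;

        if (backward)
            daddr = WIDTH * (dy + height) + dx + width;
        else
            daddr = WIDTH * dy + dx;

        if (bpp16)
            daddr <<= 1;            /* byte address at 16 bits per pixel */
        return daddr;
    }

    int main(void)
    {
        printf("forward:  0x%x\n", (unsigned int)blit_daddr(10, 20, 100, 50, 0, 1));
        printf("backward: 0x%x\n", (unsigned int)blit_daddr(10, 20, 100, 50, 1, 1));
        return 0;
    }
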
/drivers/net/ethernet/mellanox/mlxsw/
  D  spectrum_span.c
      355  union mlxsw_sp_l3addr daddr,  in mlxsw_sp_span_entry_tunnel_parms_common() argument
      365  gw = daddr;  in mlxsw_sp_span_entry_tunnel_parms_common()
      401  sparmsp->daddr = daddr;  in mlxsw_sp_span_entry_tunnel_parms_common()
      454  union mlxsw_sp_l3addr daddr = { .addr4 = tparm.iph.daddr };  in mlxsw_sp_span_entry_gretap4_parms() local
      457  union mlxsw_sp_l3addr gw = daddr;  in mlxsw_sp_span_entry_gretap4_parms()
      466  mlxsw_sp_l3addr_is_zero(daddr))  in mlxsw_sp_span_entry_gretap4_parms()
      470  return mlxsw_sp_span_entry_tunnel_parms_common(l3edev, saddr, daddr, gw,  in mlxsw_sp_span_entry_gretap4_parms()
      497  be32_to_cpu(sparms.daddr.addr4));  in mlxsw_sp_span_entry_gretap4_configure()
      533  if (!ip6_tnl_xmit_ctl(t, &fl6.saddr, &fl6.daddr))  in mlxsw_sp_span_gretap6_route()
      559  union mlxsw_sp_l3addr daddr = { .addr6 = tparm.raddr };  in mlxsw_sp_span_entry_gretap6_parms() local
      [all …]

/drivers/net/ethernet/mellanox/mlx5/core/en/
  D  tc_tun.c
      124  n = dst_neigh_lookup(&rt->dst, &fl4->daddr);  in mlx5e_route_lookup_ipv4_get()
      202  fl4.daddr = tun_key->u.ipv4.dst;  in mlx5e_tc_tun_create_header_ipv4()
      261  ip->daddr = fl4.daddr;  in mlx5e_tc_tun_create_header_ipv4()
      332  n = dst_neigh_lookup(dst, &fl6->daddr);  in mlx5e_route_lookup_ipv6_get()
      368  fl6.daddr = tun_key->u.ipv6.dst;  in mlx5e_tc_tun_create_header_ipv6()
      425  ip6h->daddr = fl6.daddr;  in mlx5e_tc_tun_create_header_ipv6()

/drivers/net/ethernet/aquantia/atlantic/
  D  aq_ring.c
      23  dma_unmap_page(dev, rxpage->daddr, len, DMA_FROM_DEVICE);  in aq_free_rxpage()
      35  dma_addr_t daddr;  in aq_get_rxpage() local
      41  daddr = dma_map_page(dev, page, 0, PAGE_SIZE << order,  in aq_get_rxpage()
      44  if (unlikely(dma_mapping_error(dev, daddr)))  in aq_get_rxpage()
      48  rxpage->daddr = daddr;  in aq_get_rxpage()
      423  buff->rxdata.daddr,  in aq_ring_rx_clean()
      465  buff_->rxdata.daddr,  in aq_ring_rx_clean()

/drivers/net/arcnet/
  D  rfc1051.c
      43  unsigned short type, uint8_t daddr);
      162  unsigned short type, uint8_t daddr)  in build_header() argument
      202  pkt->hard.dest = daddr;  in build_header()

/drivers/net/ipvlan/
  D  ipvlan_l3s.c
      54  err = ip_route_input_noref(skb, ip4h->daddr, ip4h->saddr,  in ipvlan_l3_rcv()
      68  .daddr = ip6h->daddr,  in ipvlan_l3_rcv()

/drivers/net/wireguard/
  D  socket.c
      25  .daddr = endpoint->addr4.sin_addr.s_addr,  in send4()
      85  udp_tunnel_xmit_skb(rt, sock, skb, fl.saddr, fl.daddr, ds,  in send4()
      103  .daddr = endpoint->addr6.sin6_addr,  in send6()
      152  udp_tunnel6_xmit_skb(dst, sock, skb, skb->dev, &fl.saddr, &fl.daddr, ds,  in send6()
      243  endpoint->src4.s_addr = ip_hdr(skb)->daddr;  in wg_socket_endpoint_from_skb()
      251  endpoint->src6 = ipv6_hdr(skb)->daddr;  in wg_socket_endpoint_from_skb()

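Note: wg_socket_endpoint_from_skb() records the received packet's destination address (ip_hdr()->daddr) as the local source to use for replies, next to the peer's address. A userspace sketch of that role swap; struct endpoint below is illustrative, not WireGuard's.

    #include <arpa/inet.h>
    #include <netinet/ip.h>
    #include <stdio.h>

    /* Illustrative endpoint: where the peer is, and which local address to reply from. */
    struct endpoint {
        struct in_addr peer;    /* packet's saddr */
        struct in_addr src;     /* packet's daddr, reused as the reply source */
    };

    static void endpoint_from_iphdr(struct endpoint *ep, const struct iphdr *ip)
    {
        ep->peer.s_addr = ip->saddr;
        ep->src.s_addr = ip->daddr;
    }

    int main(void)
    {
        struct iphdr ip = { .version = 4, .ihl = 5 };
        struct endpoint ep;
        char a[INET_ADDRSTRLEN], b[INET_ADDRSTRLEN];

        inet_pton(AF_INET, "198.51.100.2", &ip.saddr);   /* peer */
        inet_pton(AF_INET, "203.0.113.9", &ip.daddr);    /* local address the peer used */

        endpoint_from_iphdr(&ep, &ip);
        printf("reply to %s from %s\n",
               inet_ntop(AF_INET, &ep.peer, a, sizeof(a)),
               inet_ntop(AF_INET, &ep.src, b, sizeof(b)));
        return 0;
    }
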