/drivers/net/ethernet/chelsio/cxgb4/ |
D | smt.c | 65 static struct smt_entry *find_or_alloc_smte(struct smt_data *s, u8 *smac) in find_or_alloc_smte() argument
            79 if (memcmp(e->src_mac, smac, ETH_ALEN) == 0) in find_or_alloc_smte()
            209 u8 *smac) in t4_smt_alloc_switching() argument
            215 e = find_or_alloc_smte(s, smac); in t4_smt_alloc_switching()
            222 memcpy(e->src_mac, smac, ETH_ALEN); in t4_smt_alloc_switching()
            240 struct smt_entry *cxgb4_smt_alloc_switching(struct net_device *dev, u8 *smac) in cxgb4_smt_alloc_switching() argument
            244 return t4_smt_alloc_switching(adap, 0x0, smac); in cxgb4_smt_alloc_switching()
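The smt.c hits above show the find-or-alloc pattern for source-MAC (SMT) entries: find_or_alloc_smte() matches an existing entry byte-wise against smac, and t4_smt_alloc_switching() records the MAC into a newly claimed entry. A minimal sketch of that pattern follows; the struct layout, table size, and refcount handling are illustrative assumptions, not the cxgb4 definitions.

/* Hedged sketch: reuse an SMT-style entry whose src_mac matches, otherwise
 * claim a free slot and record the MAC.  Only memcmp()/ether_addr_copy()/
 * ETH_ALEN are taken from the listing; everything named demo_* is made up. */
#include <linux/kernel.h>        /* ARRAY_SIZE() */
#include <linux/etherdevice.h>   /* ETH_ALEN, ether_addr_copy() */
#include <linux/string.h>        /* memcmp() */

struct demo_smt_entry {
	u8  src_mac[ETH_ALEN];
	int refcnt;
};

struct demo_smt_table {
	struct demo_smt_entry entry[16];
};

static struct demo_smt_entry *demo_find_or_alloc(struct demo_smt_table *t,
						 const u8 *smac)
{
	struct demo_smt_entry *first_free = NULL;
	int i;

	for (i = 0; i < ARRAY_SIZE(t->entry); i++) {
		struct demo_smt_entry *e = &t->entry[i];

		if (!e->refcnt) {
			if (!first_free)
				first_free = e;
			continue;
		}
		/* Same comparison as find_or_alloc_smte(): byte-wise MAC match. */
		if (memcmp(e->src_mac, smac, ETH_ALEN) == 0)
			return e;
	}

	if (first_free) {
		/* New entry: record the source MAC, as t4_smt_alloc_switching()
		 * does with memcpy(e->src_mac, smac, ETH_ALEN). */
		ether_addr_copy(first_free->src_mac, smac);
		first_free->refcnt = 1;
	}
	return first_free;
}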
|
D | smt.h | 73 struct smt_entry *cxgb4_smt_alloc_switching(struct net_device *dev, u8 *smac);
|
D | cxgb4_tc_flower.c | 49 PEDIT_FIELDS(ETH_, SMAC_15_0, 2, smac, 0),
            50 PEDIT_FIELDS(ETH_, SMAC_47_16, 4, smac, 2),
|
/drivers/net/ethernet/mscc/ |
D | ocelot_ace.h | 97 struct ocelot_vcap_u48 smac; member
            104 struct ocelot_vcap_u48 smac; member
            112 struct ocelot_vcap_u48 smac; member
            119 struct ocelot_vcap_u48 smac; member
|
D | ocelot_ace.c | 372 VCAP_KEY_BYTES_SET(L2_SMAC, etype->smac.value, in is2_entry_set()
            373 etype->smac.mask); in is2_entry_set()
            386 VCAP_KEY_BYTES_SET(L2_SMAC, llc->smac.value, llc->smac.mask); in is2_entry_set()
            399 VCAP_KEY_BYTES_SET(L2_SMAC, snap->smac.value, snap->smac.mask); in is2_entry_set()
            409 VCAP_KEY_BYTES_SET(MAC_ARP_L2_SMAC, arp->smac.value, in is2_entry_set()
            410 arp->smac.mask); in is2_entry_set()
|
D | ocelot_flower.c | 90 ether_addr_copy(ocelot_rule->frame.etype.smac.value, in ocelot_flower_parse()
            94 ether_addr_copy(ocelot_rule->frame.etype.smac.mask, in ocelot_flower_parse()
|
/drivers/infiniband/hw/bnxt_re/ |
D | qplib_sp.c | 281 struct bnxt_qplib_gid *gid, u8 *smac, u16 vlan_id, in bnxt_qplib_add_sgid() argument
            345 req.src_mac[0] = cpu_to_be16(((u16 *)smac)[0]); in bnxt_qplib_add_sgid()
            346 req.src_mac[1] = cpu_to_be16(((u16 *)smac)[1]); in bnxt_qplib_add_sgid()
            347 req.src_mac[2] = cpu_to_be16(((u16 *)smac)[2]); in bnxt_qplib_add_sgid()
            373 u8 *smac) in bnxt_qplib_update_sgid() argument
            397 req.src_mac[0] = cpu_to_be16(((u16 *)smac)[0]); in bnxt_qplib_update_sgid()
            398 req.src_mac[1] = cpu_to_be16(((u16 *)smac)[1]); in bnxt_qplib_update_sgid()
            399 req.src_mac[2] = cpu_to_be16(((u16 *)smac)[2]); in bnxt_qplib_update_sgid()
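The qplib_sp.c hits pack the 6-byte source MAC into three big-endian 16-bit words of the firmware command. A hedged sketch of just that conversion follows; demo_sgid_req and demo_pack_smac are stand-ins for the real request layout and helper, while cpu_to_be16() and the cast are taken directly from the lines above.

/* Sketch: mirror req.src_mac[i] = cpu_to_be16(((u16 *)smac)[i]); the MAC is
 * read as three 16-bit halves in CPU order and each half is converted to
 * big-endian for the wire/firmware representation. */
#include <linux/types.h>
#include <asm/byteorder.h>       /* cpu_to_be16() */

struct demo_sgid_req {
	__be16 src_mac[3];
};

static void demo_pack_smac(struct demo_sgid_req *req, const u8 *smac)
{
	const u16 *halves = (const u16 *)smac;

	req->src_mac[0] = cpu_to_be16(halves[0]);
	req->src_mac[1] = cpu_to_be16(halves[1]);
	req->src_mac[2] = cpu_to_be16(halves[2]);
}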
|
D | qplib_fp.h | 281 u8 smac[6]; member
            359 u8 smac[6]; member
|
D | qplib_sp.h | 234 struct bnxt_qplib_gid *gid, u16 gid_idx, u8 *smac);
|
D | ib_verbs.c | 954 ether_addr_copy(qp->qplib_qp.smac, rdev->netdev->dev_addr); in bnxt_re_create_shadow_qp()
            1025 ether_addr_copy(qp->qplib_qp.smac, rdev->netdev->dev_addr); in bnxt_re_create_qp()
            1637 &qp->qplib_qp.smac[0]); in bnxt_re_modify_qp()
            1900 ether_addr_copy(qp->qp1_hdr.eth.smac_h, qp->qplib_qp.smac); in bnxt_re_build_qp1_send_v2()
            3042 memcpy(wc->smac, orig_cqe->smac, ETH_ALEN); in bnxt_re_process_res_shadow_qp_wc()
            3077 memcpy(wc->smac, cqe->smac, ETH_ALEN); in bnxt_re_process_res_ud_wc()
            3175 memcpy(wc->smac, cqe->smac, ETH_ALEN); in bnxt_re_poll_cq()
|
D | qplib_fp.c | 1364 memcpy(qp->smac, sb->src_mac, 6); in bnxt_qplib_query_qp()
            2436 memcpy(cqe->smac, hwcqe->src_mac, ETH_ALEN); in bnxt_qplib_cq_process_res_ud()
            2540 memcpy(cqe->smac, qp->smac, 6); in bnxt_qplib_cq_process_res_raweth_qp1()
|
/drivers/infiniband/hw/mlx4/ |
D | qp.c | 1428 if (qp->pri.smac || (!qp->pri.smac && qp->pri.smac_port)) { in destroy_qp_common()
            1429 mlx4_unregister_mac(dev->dev, qp->pri.smac_port, qp->pri.smac); in destroy_qp_common()
            1430 qp->pri.smac = 0; in destroy_qp_common()
            1433 if (qp->alt.smac) { in destroy_qp_common()
            1434 mlx4_unregister_mac(dev->dev, qp->alt.smac_port, qp->alt.smac); in destroy_qp_common()
            1435 qp->alt.smac = 0; in destroy_qp_common()
            1797 u64 smac, u16 vlan_tag, struct mlx4_qp_path *path, in _mlx4_set_path() argument
            1887 if ((!smac_info->smac && !smac_info->smac_port) || in _mlx4_set_path()
            1888 smac_info->smac != smac) { in _mlx4_set_path()
            1890 smac_index = mlx4_register_mac(dev->dev, port, smac); in _mlx4_set_path()
            [all …]
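The qp.c hits show _mlx4_set_path() registering a new source MAC with mlx4_register_mac() only when it differs from the cached one, and destroy_qp_common() releasing it with mlx4_unregister_mac(). A hedged sketch of that register-on-change / unregister-on-teardown pattern follows; demo_smac_cache and its fields stand in for the driver's per-path smac/smac_port/smac_index state, while the two mlx4 core helpers are the real ones.

/* Sketch, assuming mlx4_register_mac() returns the MAC-table index (>= 0)
 * or a negative errno, as mlx4_ib uses it in _mlx4_set_path(). */
#include <linux/mlx4/device.h>

struct demo_smac_cache {
	u64 smac;	/* currently registered source MAC, 0 if none */
	u8  smac_port;	/* port it was registered on */
	int smac_index;	/* index returned by mlx4_register_mac() */
};

static int demo_set_smac(struct mlx4_dev *dev, struct demo_smac_cache *c,
			 u8 port, u64 smac)
{
	int idx;

	/* Nothing to do if this MAC is already registered on this port. */
	if (c->smac == smac && c->smac_port == port)
		return c->smac_index;

	/* Release the previously registered MAC, as destroy_qp_common() does. */
	if (c->smac)
		mlx4_unregister_mac(dev, c->smac_port, c->smac);

	idx = mlx4_register_mac(dev, port, smac);
	if (idx < 0) {
		c->smac = 0;
		return idx;
	}

	c->smac = smac;
	c->smac_port = port;
	c->smac_index = idx;
	return idx;
}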
|
D | cq.c | 603 memcpy(&(wc->smac[0]), (char *)&hdr->tun.mac_31_0, 4); in use_tunnel_data()
            604 memcpy(&(wc->smac[4]), (char *)&hdr->tun.slid_mac_47_32, 2); in use_tunnel_data()
            869 memcpy(wc->smac, cqe->smac, ETH_ALEN); in mlx4_ib_poll_one()
|
/drivers/net/ethernet/broadcom/bnxt/ |
D | bnxt_tc.c | 208 ether_addr_copy(flow->l2_key.smac, match.key->src); in bnxt_tc_parse_flow()
            209 ether_addr_copy(flow->l2_mask.smac, match.mask->src); in bnxt_tc_parse_flow()
            416 memcpy(req.smac, flow->l2_key.smac, ETH_ALEN); in bnxt_hwrm_cfa_flow_alloc()
            483 memcpy(&req.l2_rewrite_smac, &req.smac, ETH_ALEN); in bnxt_hwrm_cfa_flow_alloc()
            491 memcpy(&req.l2_rewrite_smac, &req.smac, ETH_ALEN); in bnxt_hwrm_cfa_flow_alloc()
            624 ether_addr_copy(encap->src_mac_addr, l2_info->smac); in hwrm_cfa_encap_record_alloc()
            779 if (bits_set(&flow->l2_key.smac, sizeof(flow->l2_key.smac)) && in bnxt_tc_can_offload()
            780 !is_exactmatch(flow->l2_mask.smac, sizeof(flow->l2_mask.smac))) { in bnxt_tc_can_offload()
            1013 ether_addr_copy(l2_info->smac, dst_dev->dev_addr); in bnxt_tc_resolve_tunnel_hdrs()
            1068 ether_addr_copy(decap_l2_info->dmac, l2_info.smac); in bnxt_tc_get_decap_handle()
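Lines 208-209 of bnxt_tc.c copy the flower match's source-MAC key and mask into the driver-private l2_key/l2_mask. A hedged sketch of that parsing step follows; demo_l2_key is a stand-in for the driver's own layout, while flow_rule_match_key()/flow_rule_match_eth_addrs() are the real flow-offload helpers.

/* Sketch: pull the Ethernet-address key and mask out of a struct flow_rule
 * and copy the source (and destination) MAC into driver-private storage. */
#include <net/flow_offload.h>
#include <linux/etherdevice.h>

struct demo_l2_key {
	u8 smac[ETH_ALEN];
	u8 dmac[ETH_ALEN];
};

static void demo_parse_eth_addrs(struct flow_rule *rule,
				 struct demo_l2_key *key,
				 struct demo_l2_key *mask)
{
	if (flow_rule_match_key(rule, FLOW_DISSECTOR_KEY_ETH_ADDRS)) {
		struct flow_match_eth_addrs match;

		flow_rule_match_eth_addrs(rule, &match);
		/* Same pairing as lines 208-209 above: key from match.key,
		 * mask from match.mask. */
		ether_addr_copy(key->smac, match.key->src);
		ether_addr_copy(mask->smac, match.mask->src);
		ether_addr_copy(key->dmac, match.key->dst);
		ether_addr_copy(mask->dmac, match.mask->dst);
	}
}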
|
D | bnxt_tc.h | 21 u8 smac[ETH_ALEN]; member
|
/drivers/net/ethernet/mellanox/mlxsw/ |
D | spectrum_span.h | 33 unsigned char smac[ETH_ALEN]; member
|
D | spectrum_span.c | 282 memcpy(sparmsp->smac, edev->dev_addr, ETH_ALEN); in mlxsw_sp_span_entry_tunnel_parms_common()
            375 sparms.ttl, sparms.smac, in mlxsw_sp_span_entry_gretap4_configure()
            475 mlxsw_reg_mpat_eth_rspan_l3_ipv6_pack(mpat_pl, sparms.ttl, sparms.smac, in mlxsw_sp_span_entry_gretap6_configure()
|
/drivers/infiniband/hw/qedr/ |
D | qedr_roce_cm.c | 112 *((u32 *)&qp->rqe_wr_id[qp->rq.gsi_cons].smac[0]) = in qedr_ll2_complete_rx_packet()
            114 *((u16 *)&qp->rqe_wr_id[qp->rq.gsi_cons].smac[4]) = in qedr_ll2_complete_rx_packet()
            695 ether_addr_copy(wc[i].smac, qp->rqe_wr_id[qp->rq.cons].smac); in qedr_gsi_poll_cq()
|
D | qedr.h | 432 u8 smac[ETH_ALEN]; member
|
/drivers/net/ethernet/mellanox/mlx5/core/diag/ |
D | fs_tracepoint.c | 68 DECLARE_MASK_VAL(u64, smac) = { in print_lyr_2_4_hdrs()
            80 PRINT_MASKED_VALP(smac, u8 *, p, "%pM"); in print_lyr_2_4_hdrs()
|
/drivers/infiniband/hw/ocrdma/ |
D | ocrdma_ah.c | 119 memcpy(ð.smac[0], &dev->nic_info.mac_addr[0], ETH_ALEN); in set_av_attr()
|
D | ocrdma_sli.h | 1895 u8 smac[6]; member
            1901 u8 smac[6]; member
|
/drivers/infiniband/core/ |
D | cache.c | 1329 u16 *vlan_id, u8 *smac) in rdma_read_gid_l2_fields() argument
            1339 if (smac) in rdma_read_gid_l2_fields()
            1340 ether_addr_copy(smac, ndev->dev_addr); in rdma_read_gid_l2_fields()
|
/drivers/infiniband/hw/hns/ |
D | hns_roce_hw_v2.c | 252 u8 *smac; in hns_roce_v2_post_send() local
            321 smac = (u8 *)hr_dev->dev_addr[qp->port]; in hns_roce_v2_post_send()
            323 smac) ? 1 : 0; in hns_roce_v2_post_send()
            2943 memcpy(wc->smac, cqe->smac, 4); in hns_roce_v2_poll_one()
            2944 wc->smac[4] = roce_get_field(cqe->byte_28, in hns_roce_v2_poll_one()
            2947 wc->smac[5] = roce_get_field(cqe->byte_28, in hns_roce_v2_poll_one()
            3682 u8 *smac; in modify_qp_init_to_rtr() local
            3824 smac = (u8 *)hr_dev->dev_addr[port]; in modify_qp_init_to_rtr()
            3826 if (ether_addr_equal_unaligned(dmac, smac) || in modify_qp_init_to_rtr()
|
D | hns_roce_hw_v1.c | 80 u8 *smac; in hns_roce_v1_post_send() local
            140 smac = (u8 *)hr_dev->dev_addr[qp->port]; in hns_roce_v1_post_send()
            142 smac) ? 1 : 0; in hns_roce_v1_post_send()
            2735 u8 *smac; in hns_roce_v1_m_qp() local
            2932 smac = (u8 *)hr_dev->dev_addr[port]; in hns_roce_v1_m_qp()
            2934 if (ether_addr_equal_unaligned(dmac, smac) || in hns_roce_v1_m_qp()
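Both hns_roce_hw_v1.c and hns_roce_hw_v2.c pick the local port's MAC out of hr_dev->dev_addr and compare it against the destination MAC to decide whether the path is a self-loop. A hedged sketch of that check follows; the per-port dev_addr table and everything named demo_* are illustrative stand-ins, while ether_addr_equal_unaligned() is the real etherdevice.h helper.

/* Sketch: return true when the destination MAC equals the port's own source
 * MAC, i.e. traffic would have to be looped back internally. */
#include <linux/etherdevice.h>   /* ether_addr_equal_unaligned(), ETH_ALEN */

#define DEMO_MAX_PORTS 6

struct demo_roce_dev {
	u8 dev_addr[DEMO_MAX_PORTS][ETH_ALEN];
};

static bool demo_needs_loopback(struct demo_roce_dev *hr_dev, u8 port,
				const u8 *dmac)
{
	const u8 *smac = hr_dev->dev_addr[port];

	/* ether_addr_equal_unaligned() is used because neither address is
	 * guaranteed to be 2-byte aligned here. */
	return ether_addr_equal_unaligned(dmac, smac);
}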
|