Searched refs:slid (Results 1 – 25 of 39) sorted by relevance

/drivers/infiniband/hw/hfi1/
ud.c
71 u32 slid; in ud_loopback() local
75 slid = ppd->lid | (rdma_ah_get_path_bits(ah_attr) & in ud_loopback()
79 slid, false))) { in ud_loopback()
83 slid, rdma_ah_get_dlid(ah_attr)); in ud_loopback()
207 wc.slid = (ppd->lid | (rdma_ah_get_path_bits(ah_attr) & in ud_loopback()
210 if (wc.slid == 0 && sqp->ibqp.qp_type == IB_QPT_GSI) in ud_loopback()
211 wc.slid = be16_to_cpu(IB_LID_PERMISSIVE); in ud_loopback()
265 u16 len, slid, dlid, pkey; in hfi1_make_ud_req_9B() local
311 slid = be16_to_cpu(IB_LID_PERMISSIVE); in hfi1_make_ud_req_9B()
318 slid = lid; in hfi1_make_ud_req_9B()
[all …]
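
The ud.c hits all build the loopback source LID the same way: the port LID OR'ed with the AH path bits, with a zero LID on a GSI QP reported as the permissive LID instead. A minimal sketch of that pattern follows; the LMC mask is an assumption, since the search output truncates those expressions mid-line.

/* Sketch only: per-path source LID as the ud.c lines above suggest.
 * The ((1 << ppd->lmc) - 1) mask is assumed; the truncated lines do not
 * show how the path bits are limited. */
wc.slid = ppd->lid |
	  (rdma_ah_get_path_bits(ah_attr) & ((1 << ppd->lmc) - 1));
if (wc.slid == 0 && sqp->ibqp.qp_type == IB_QPT_GSI)
	wc.slid = be16_to_cpu(IB_LID_PERMISSIVE);
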
ruc.c
34 u32 slid = packet->slid; in hfi1_ruc_check_hdr() local
63 sc5, slid))) { in hfi1_ruc_check_hdr()
65 slid, dlid); in hfi1_ruc_check_hdr()
69 if (slid != rdma_ah_get_dlid(&qp->alt_ah_attr) || in hfi1_ruc_check_hdr()
100 sc5, slid))) { in hfi1_ruc_check_hdr()
102 slid, dlid); in hfi1_ruc_check_hdr()
106 if ((slid != rdma_ah_get_dlid(&qp->remote_ah_attr)) || in hfi1_ruc_check_hdr()
240 u32 slid; in hfi1_make_ruc_header_16B() local
291 slid = be32_to_cpu(OPA_LID_PERMISSIVE); in hfi1_make_ruc_header_16B()
293 slid = ppd->lid | in hfi1_make_ruc_header_16B()
[all …]
trace_ibhdrs.h
80 u16 *len, u32 *dlid, u32 *slid);
89 u32 *dlid, u32 *slid);
95 u16 len, u16 pkey, u32 dlid, u32 slid);
144 __field(u32, slid)
170 &__entry->slid);
196 &__entry->slid);
240 __entry->slid),
303 __field(u32, slid)
330 &__entry->slid);
361 &__entry->slid);
[all …]
hfi.h
347 u32 slid; member
970 u32 slid; member
1700 u16 pkey, u32 slid, u32 dlid, u8 sc5,
1703 u32 remote_qpn, u16 pkey, u32 slid, u32 dlid,
1706 u32 remote_qpn, u16 pkey, u32 slid, u32 dlid,
1710 int egress_pkey_check(struct hfi1_pportdata *ppd, u32 slid, u16 pkey,
1792 u32 slid) in ingress_pkey_table_fail() argument
1799 dd->err_info_rcv_constraint.slid = slid; in ingress_pkey_table_fail()
1813 u8 sc5, u8 idx, u32 slid, bool force) in ingress_pkey_check() argument
1835 ingress_pkey_table_fail(ppd, pkey, slid); in ingress_pkey_check()
[all …]
trace.c
157 u16 *len, u32 *dlid, u32 *slid) in hfi1_trace_parse_9b_hdr() argument
165 *slid = ib_get_slid(hdr); in hfi1_trace_parse_9b_hdr()
172 u32 *dlid, u32 *slid) in hfi1_trace_parse_16b_hdr() argument
184 *slid = hfi1_16B_get_slid(hdr); in hfi1_trace_parse_16b_hdr()
195 u16 len, u16 pkey, u32 dlid, u32 slid) in hfi1_trace_fmt_lrh() argument
199 trace_seq_printf(p, LRH_PRN, len, sc, dlid, slid); in hfi1_trace_fmt_lrh()
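
The trace.c hits make the two header formats explicit: a 9B IB header yields its source LID through ib_get_slid(), a 16B OPA header through hfi1_16B_get_slid(); the driver.c hits below use the same pair of accessors. A minimal sketch of that dispatch follows; the is_16b flag is a stand-in for however the caller already distinguishes the two formats.

/* Sketch only: read the source LID from whichever header format the
 * packet carries.  "is_16b" is a placeholder for the caller's existing
 * 9B-vs-16B distinction. */
u32 slid;

if (is_16b)
	slid = hfi1_16B_get_slid(pkt->hdr);	/* 20-bit 16B source LID */
else
	slid = ib_get_slid(pkt->hdr);		/* 16-bit 9B LRH source LID */
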
ipoib_tx.c
303 u16 slid; in hfi1_ipoib_build_ib_tx_headers() local
341 slid = be16_to_cpu(IB_LID_PERMISSIVE); in hfi1_ipoib_build_ib_tx_headers()
348 slid = lid; in hfi1_ipoib_build_ib_tx_headers()
350 slid = be16_to_cpu(IB_LID_PERMISSIVE); in hfi1_ipoib_build_ib_tx_headers()
359 hfi1_make_ib_hdr(&sdma_hdr->hdr.ibh, lrh0, dwords, dlid, slid); in hfi1_ipoib_build_ib_tx_headers()
driver.c
415 u32 rlid, slid, dlid = 0; in hfi1_process_ecn_slowpath() local
425 slid = hfi1_16B_get_slid(pkt->hdr); in hfi1_process_ecn_slowpath()
436 slid = ib_get_slid(pkt->hdr); in hfi1_process_ecn_slowpath()
447 rlid = slid; in hfi1_process_ecn_slowpath()
453 rlid = slid; in hfi1_process_ecn_slowpath()
1454 if ((!packet->slid) || (!packet->dlid)) in hfi1_bypass_ingress_pkt_check()
1508 packet->slid = ib_get_slid(hdr); in hfi1_setup_9B_packet()
1589 packet->slid = hfi1_16B_get_slid(packet->hdr); in hfi1_setup_bypass_packet()
verbs.c
450 packet->slid, true); in hfi1_do_pkey_check()
1116 int egress_pkey_check(struct hfi1_pportdata *ppd, u32 slid, u16 pkey, in egress_pkey_check() argument
1160 dd->err_info_xmit_constraint.slid = slid; in egress_pkey_check()
1221 u32 slid; in hfi1_verbs_send() local
1234 slid = hfi1_16B_get_slid(hdr); in hfi1_verbs_send()
1244 slid = ib_get_slid(hdr); in hfi1_verbs_send()
1254 ret = egress_pkey_check(dd->pport, slid, pkey, in hfi1_verbs_send()
user_sdma.c
313 u32 slid; in hfi1_user_sdma_process_request() local
448 slid = be16_to_cpu(req->hdr.lrh[3]); in hfi1_user_sdma_process_request()
449 if (egress_pkey_check(dd->pport, slid, pkey, sc, PKEY_CHECK_INVALID)) { in hfi1_user_sdma_process_request()
mad.c
2516 __be32 slid; member
2524 __be32 slid; member
3405 rsp->port_xmit_constraint_ei.slid = in pma_get_opa_errorinfo()
3406 cpu_to_be32(dd->err_info_xmit_constraint.slid); in pma_get_opa_errorinfo()
3412 rsp->port_rcv_constraint_ei.slid = in pma_get_opa_errorinfo()
3413 cpu_to_be32(dd->err_info_rcv_constraint.slid); in pma_get_opa_errorinfo()
4365 return (in_wc->slid == ppd->lid); in is_local_mad()
4408 ingress_pkey_table_fail(ppd, pkey, in_wc->slid); in opa_local_smp_check()
uc.c
414 wc.slid = rdma_ah_get_dlid(&qp->remote_ah_attr) & U16_MAX; in hfi1_uc_rcv()
/drivers/infiniband/core/
uverbs_marshall.c
143 dst->slid = htons(ntohl(sa_path_get_slid(src))); in __ib_copy_path_rec_to_user()
178 u32 slid, dlid; in ib_copy_path_rec_from_user() local
184 slid = opa_get_lid_from_gid((union ib_gid *)src->sgid); in ib_copy_path_rec_from_user()
188 slid = ntohs(src->slid); in ib_copy_path_rec_from_user()
195 sa_path_set_slid(dst, slid); in ib_copy_path_rec_from_user()
mad_rmpp.c
66 u32 slid; member
311 rmpp_recv->slid = mad_recv_wc->wc->slid; in create_rmpp_recv()
332 rmpp_recv->slid == mad_recv_wc->wc->slid && in find_rmpp_recv()
865 if (rmpp_recv->slid == rdma_ah_get_dlid(&ah_attr)) { in init_newwin()
cm.c
1327 pri_path->opa.slid); in cm_format_req()
1368 OPA_MAKE_ID(be32_to_cpu(pri_path->opa.slid)); in cm_format_req()
1406 alt_path->opa.slid); in cm_format_req()
1416 OPA_MAKE_ID(be32_to_cpu(alt_path->opa.slid)); in cm_format_req()
1641 sa_path_set_dlid(primary_path, wc->slid); in cm_format_path_lid_from_req()
2038 be16_to_cpu(ib_lid_be16(wc->slid))); in cm_process_routed_req()
2052 be16_to_cpu(ib_lid_be16(wc->slid))); in cm_process_routed_req()
3569 cm_id_priv->sidr_slid = wc->slid; in cm_sidr_req_handler()
/drivers/infiniband/hw/mthca/
mthca_mad.c
205 u16 slid = in_wc ? ib_lid_cpu16(in_wc->slid) : be16_to_cpu(IB_LID_PERMISSIVE); in mthca_process_mad() local
210 if (in->mad_hdr.method == IB_MGMT_METHOD_TRAP && !slid) { in mthca_process_mad()
/drivers/infiniband/hw/mlx5/
mad.c
233 u16 slid; in mlx5_ib_process_mad() local
236 slid = in_wc ? ib_lid_cpu16(in_wc->slid) : in mlx5_ib_process_mad()
239 if (method == IB_MGMT_METHOD_TRAP && !slid) in mlx5_ib_process_mad()
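
The mthca_mad.c and mlx5 mad.c hits share one guard when processing a MAD: with no receive work completion the source LID defaults to the permissive LID, and a Trap whose source LID is zero is intercepted (the mlx4 mad.c hits further down follow the same pattern). A minimal sketch of that guard follows; the return value and the forwarding note are assumptions, since the search output stops at the condition.

/* Sketch of the shared guard shown above: default slid to the permissive
 * LID when there is no receive WC, then intercept Traps that arrive with
 * a zero source LID. */
u16 slid = in_wc ? ib_lid_cpu16(in_wc->slid) :
		   be16_to_cpu(IB_LID_PERMISSIVE);

if (in_mad->mad_hdr.method == IB_MGMT_METHOD_TRAP && slid == 0)
	/* assumed: mthca/mlx4 forward the locally generated trap here */
	return IB_MAD_RESULT_SUCCESS | IB_MAD_RESULT_CONSUMED;
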
cq.c
240 wc->slid = be16_to_cpu(cqe->slid); in handle_responder()
245 wc->slid = 0; in handle_responder()
/drivers/infiniband/ulp/opa_vnic/
opa_vnic_encap.c
73 static inline void opa_vnic_make_header(u8 *hdr, u32 slid, u32 dlid, u16 len, in opa_vnic_make_header() argument
85 h[0] |= (slid & OPA_16B_LID_MASK); in opa_vnic_make_header()
86 h[2] |= ((slid >> (20 - OPA_16B_SLID_HIGH_SHFT)) & OPA_16B_SLID_MASK); in opa_vnic_make_header()
/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_cq.c
365 wc->slid = cqe->slid; in pvrdma_poll_one()
/drivers/infiniband/hw/mlx4/
cq.c
596 wc->slid = 0; in use_tunnel_data()
602 wc->slid = be16_to_cpu(hdr->tun.slid_mac_47_32); in use_tunnel_data()
856 wc->slid = 0; in mlx4_ib_poll_one()
868 wc->slid = be16_to_cpu(cqe->rlid); in mlx4_ib_poll_one()
mad.c
172 in_modifier |= ib_lid_cpu16(in_wc->slid) << 16; in mlx4_MAD_IFC()
646 tun_mad->hdr.slid_mac_47_32 = ib_lid_be16(wc->slid); in mlx4_ib_send_to_slave()
826 u16 slid, prev_lid = 0; in ib_process_mad() local
830 slid = in_wc ? ib_lid_cpu16(in_wc->slid) : be16_to_cpu(IB_LID_PERMISSIVE); in ib_process_mad()
832 if (in_mad->mad_hdr.method == IB_MGMT_METHOD_TRAP && slid == 0) { in ib_process_mad()
/drivers/infiniband/sw/rdmavt/
cq.c
90 uqueue[head].slid = ib_lid_cpu16(entry->slid); in rvt_cq_enter()
/drivers/infiniband/hw/qib/
qib_ud.c
208 wc.slid = ppd->lid | (rdma_ah_get_path_bits(ah_attr) & in qib_ud_loopback()
568 wc.slid = be16_to_cpu(hdr->lrh[3]); in qib_ud_rcv()
qib_uc.c
396 wc.slid = rdma_ah_get_dlid(&qp->remote_ah_attr); in qib_uc_rcv()
/drivers/infiniband/ulp/ipoib/
ipoib_ib.c
217 wc->byte_len, wc->slid); in ipoib_ib_handle_rx_wc()
239 if (wc->slid == priv->local_lid && wc->src_qp == priv->qp->qp_num) { in ipoib_ib_handle_rx_wc()
