
Searched refs:slid (Results 1 – 25 of 36) sorted by relevance

/drivers/infiniband/hw/hfi1/
ud.c
112 u32 slid; in ud_loopback() local
116 slid = ppd->lid | (rdma_ah_get_path_bits(ah_attr) & in ud_loopback()
120 slid, false))) { in ud_loopback()
124 slid, rdma_ah_get_dlid(ah_attr)); in ud_loopback()
268 wc.slid = ppd->lid | (rdma_ah_get_path_bits(ah_attr) & in ud_loopback()
271 if (wc.slid == 0 && sqp->ibqp.qp_type == IB_QPT_GSI) in ud_loopback()
272 wc.slid = be16_to_cpu(IB_LID_PERMISSIVE); in ud_loopback()
326 u16 len, slid, dlid, pkey; in hfi1_make_ud_req_9B() local
371 slid = be16_to_cpu(IB_LID_PERMISSIVE); in hfi1_make_ud_req_9B()
378 slid = lid; in hfi1_make_ud_req_9B()
[all …]
trace_ibhdrs.h
110 u16 *len, u32 *dlid, u32 *slid);
119 u32 *dlid, u32 *slid);
125 u16 len, u16 pkey, u32 dlid, u32 slid);
172 __field(u32, slid)
194 &__entry->slid);
214 &__entry->slid);
253 __entry->slid),
309 __field(u32, slid)
332 &__entry->slid);
356 &__entry->slid);
[all …]
ruc.c
226 u32 slid = packet->slid; in hfi1_ruc_check_hdr() local
266 sc5, slid))) { in hfi1_ruc_check_hdr()
268 slid, dlid); in hfi1_ruc_check_hdr()
272 if (slid != rdma_ah_get_dlid(&qp->alt_ah_attr) || in hfi1_ruc_check_hdr()
303 sc5, slid))) { in hfi1_ruc_check_hdr()
305 slid, dlid); in hfi1_ruc_check_hdr()
309 if ((slid != rdma_ah_get_dlid(&qp->remote_ah_attr)) || in hfi1_ruc_check_hdr()
565 wc.slid = rdma_ah_get_dlid(&qp->remote_ah_attr); in ruc_loopback()
776 u32 slid; in hfi1_make_ruc_header_16B() local
827 slid = be32_to_cpu(OPA_LID_PERMISSIVE); in hfi1_make_ruc_header_16B()
[all …]
hfi.h
342 u32 slid; member
898 u32 slid; member
1533 u16 pkey, u32 slid, u32 dlid, u8 sc5,
1536 u32 remote_qpn, u16 pkey, u32 slid, u32 dlid,
1539 u32 remote_qpn, u16 pkey, u32 slid, u32 dlid,
1548 int egress_pkey_check(struct hfi1_pportdata *ppd, u32 slid, u16 pkey,
1630 u16 slid) in ingress_pkey_table_fail() argument
1637 dd->err_info_rcv_constraint.slid = slid; in ingress_pkey_table_fail()
1651 u8 sc5, u8 idx, u32 slid, bool force) in ingress_pkey_check() argument
1673 ingress_pkey_table_fail(ppd, pkey, slid); in ingress_pkey_check()
[all …]
trace.c
178 u16 *len, u32 *dlid, u32 *slid) in hfi1_trace_parse_9b_hdr() argument
186 *slid = ib_get_slid(hdr); in hfi1_trace_parse_9b_hdr()
193 u32 *dlid, u32 *slid) in hfi1_trace_parse_16b_hdr() argument
205 *slid = hfi1_16B_get_slid(hdr); in hfi1_trace_parse_16b_hdr()
216 u16 len, u16 pkey, u32 dlid, u32 slid) in hfi1_trace_fmt_lrh() argument
220 trace_seq_printf(p, LRH_PRN, len, sc, dlid, slid); in hfi1_trace_fmt_lrh()
driver.c
445 u32 rlid, slid, dlid = 0; in hfi1_process_ecn_slowpath() local
455 slid = hfi1_16B_get_slid(pkt->hdr); in hfi1_process_ecn_slowpath()
463 slid = ib_get_slid(pkt->hdr); in hfi1_process_ecn_slowpath()
470 rlid = slid; in hfi1_process_ecn_slowpath()
476 rlid = slid; in hfi1_process_ecn_slowpath()
1401 if ((!packet->slid) || (!packet->dlid)) in hfi1_bypass_ingress_pkt_check()
1455 packet->slid = ib_get_slid(hdr); in hfi1_setup_9B_packet()
1521 packet->slid = hfi1_16B_get_slid(packet->hdr); in hfi1_setup_bypass_packet()
verbs.c
569 packet->slid, true); in hfi1_do_pkey_check()
1210 int egress_pkey_check(struct hfi1_pportdata *ppd, u32 slid, u16 pkey, in egress_pkey_check() argument
1254 dd->err_info_xmit_constraint.slid = slid; in egress_pkey_check()
1314 u32 slid; in hfi1_verbs_send() local
1325 slid = hfi1_16B_get_slid(hdr); in hfi1_verbs_send()
1335 slid = ib_get_slid(hdr); in hfi1_verbs_send()
1341 ret = egress_pkey_check(dd->pport, slid, pkey, in hfi1_verbs_send()
user_sdma.c
340 u32 slid; in hfi1_user_sdma_process_request() local
474 slid = be16_to_cpu(req->hdr.lrh[3]); in hfi1_user_sdma_process_request()
475 if (egress_pkey_check(dd->pport, slid, pkey, sc, PKEY_CHECK_INVALID)) { in hfi1_user_sdma_process_request()
mad.c
2544 __be32 slid; member
2552 __be32 slid; member
3335 rsp->port_xmit_constraint_ei.slid = in pma_get_opa_errorinfo()
3336 cpu_to_be32(dd->err_info_xmit_constraint.slid); in pma_get_opa_errorinfo()
3342 rsp->port_rcv_constraint_ei.slid = in pma_get_opa_errorinfo()
3343 cpu_to_be32(dd->err_info_rcv_constraint.slid); in pma_get_opa_errorinfo()
4272 return (in_wc->slid == ppd->lid); in is_local_mad()
4319 ingress_pkey_table_fail(ppd, pkey, ib_lid_cpu16(0xFFFF & in_wc->slid)); in opa_local_smp_check()
uc.c
466 wc.slid = rdma_ah_get_dlid(&qp->remote_ah_attr); in hfi1_uc_rcv()
/drivers/infiniband/core/
uverbs_marshall.c
144 dst->slid = htons(ntohl(sa_path_get_slid(src))); in __ib_copy_path_rec_to_user()
179 __be32 slid, dlid; in ib_copy_path_rec_from_user() local
185 slid = htonl(opa_get_lid_from_gid((union ib_gid *)src->sgid)); in ib_copy_path_rec_from_user()
189 slid = htonl(ntohs(src->slid)); in ib_copy_path_rec_from_user()
196 sa_path_set_slid(dst, slid); in ib_copy_path_rec_from_user()
mad_rmpp.c
67 u32 slid; member
319 rmpp_recv->slid = mad_recv_wc->wc->slid; in create_rmpp_recv()
340 rmpp_recv->slid == mad_recv_wc->wc->slid && in find_rmpp_recv()
873 if (rmpp_recv->slid == rdma_ah_get_dlid(&ah_attr)) { in init_newwin()
cm.c
1204 pri_path->opa.slid); in cm_format_req()
1236 = OPA_MAKE_ID(be32_to_cpu(pri_path->opa.slid)); in cm_format_req()
1265 alt_path->opa.slid); in cm_format_req()
1271 = OPA_MAKE_ID(be32_to_cpu(alt_path->opa.slid)); in cm_format_req()
1795 req_msg->primary_local_lid = ib_lid_be16(wc->slid); in cm_process_routed_req()
1805 req_msg->alt_local_lid = ib_lid_be16(wc->slid); in cm_process_routed_req()
2948 alternate_path->opa.slid); in cm_format_lap()
2964 = OPA_MAKE_ID(be32_to_cpu(alternate_path->opa.slid)); in cm_format_lap()
3401 cm_id_priv->av.dgid.global.subnet_prefix = cpu_to_be64(wc->slid); in cm_sidr_req_handler()
user_mad.c
240 mad_recv_wc->wc->slid); in recv_handler()
242 packet->mad.hdr.lid = ib_lid_be16(mad_recv_wc->wc->slid); in recv_handler()
/drivers/infiniband/hw/mthca/
mthca_mad.c
208 u16 slid = in_wc ? ib_lid_cpu16(in_wc->slid) : be16_to_cpu(IB_LID_PERMISSIVE); in mthca_process_mad() local
220 slid == 0) { in mthca_process_mad()
/drivers/infiniband/hw/mlx5/
mad.c
78 u16 slid; in process_mad() local
81 slid = in_wc ? ib_lid_cpu16(in_wc->slid) : be16_to_cpu(IB_LID_PERMISSIVE); in process_mad()
83 if (in_mad->mad_hdr.method == IB_MGMT_METHOD_TRAP && slid == 0) in process_mad()
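The mthca_mad.c and mad.c hits above (and the mlx4 mad.c hits further down) show a recurring pattern in the MAD processing paths: take the source LID from the receive work completion when one is present, otherwise fall back to the permissive LID, and ignore TRAP MADs whose source LID is 0. A minimal standalone sketch of that check; the struct layouts and constant values below are stand-ins assumed for illustration, not the real rdma header definitions:

/*
 * Hedged sketch of the slid handling visible in the MAD hits above.
 * IB_LID_PERMISSIVE is assumed here as the host-order value 0xFFFF,
 * and the structs are simplified stand-ins for struct ib_wc / ib_mad_hdr.
 */
#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

#define IB_LID_PERMISSIVE_CPU 0xFFFFu   /* assumed host-order permissive LID */
#define IB_MGMT_METHOD_TRAP   0x05u     /* MAD TRAP method code */

struct wc_sketch { uint32_t slid; };        /* stand-in for struct ib_wc */
struct mad_hdr_sketch { uint8_t method; };  /* stand-in for struct ib_mad_hdr */

/* Returns true when the MAD should be ignored without processing. */
static bool drop_trap_from_zero_slid(const struct wc_sketch *in_wc,
				     const struct mad_hdr_sketch *hdr)
{
	uint16_t slid = in_wc ? (uint16_t)in_wc->slid : IB_LID_PERMISSIVE_CPU;

	return hdr->method == IB_MGMT_METHOD_TRAP && slid == 0;
}

int main(void)
{
	struct wc_sketch wc = { .slid = 0 };
	struct mad_hdr_sketch trap = { .method = IB_MGMT_METHOD_TRAP };

	printf("drop: %d\n", drop_trap_from_zero_slid(&wc, &trap));  /* prints 1 */
	printf("drop: %d\n", drop_trap_from_zero_slid(NULL, &trap)); /* prints 0 */
	return 0;
}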
cq.c
241 wc->slid = be16_to_cpu(cqe->slid); in handle_responder()
246 wc->slid = 0; in handle_responder()
/drivers/infiniband/ulp/opa_vnic/
opa_vnic_encap.c
73 static inline void opa_vnic_make_header(u8 *hdr, u32 slid, u32 dlid, u16 len, in opa_vnic_make_header() argument
85 h[0] |= (slid & OPA_16B_LID_MASK); in opa_vnic_make_header()
86 h[2] |= ((slid >> (20 - OPA_16B_SLID_HIGH_SHFT)) & OPA_16B_SLID_MASK); in opa_vnic_make_header()
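The two opa_vnic_encap.c hits above show opa_vnic_make_header() splitting the source LID across two 32-bit header words: the low 20 bits land in word 0 and the high bits are shifted into word 2. A minimal standalone sketch of that split; the mask, shift, and bit positions below are illustrative assumptions, since the real OPA_16B_* constants are defined elsewhere in the driver and are not part of these results:

/*
 * Hedged sketch of the 16B SLID packing suggested by the hits above:
 * SLID[19:0] in word 0, SLID[23:20] at an assumed position in word 2.
 */
#include <stdint.h>
#include <stdio.h>

#define LID_LOW_MASK  0xFFFFFu  /* low 20 bits of the LID (assumed) */
#define LID_HIGH_SHFT 20u       /* high 4 bits start at bit 20 (assumed) */
#define SLID_HIGH_POS 8u        /* bit position of SLID[23:20] in word 2 (assumed) */

static void pack_16b_slid(uint32_t h[3], uint32_t slid)
{
	h[0] |= slid & LID_LOW_MASK;                                /* SLID[19:0]  */
	h[2] |= ((slid >> LID_HIGH_SHFT) & 0xF) << SLID_HIGH_POS;   /* SLID[23:20] */
}

int main(void)
{
	uint32_t h[3] = { 0, 0, 0 };

	pack_16b_slid(h, 0xABCDEF);
	printf("h0=%#x h2=%#x\n", h[0], h[2]);  /* h0=0xbcdef h2=0xa00 */
	return 0;
}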
/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_cq.c
389 wc->slid = cqe->slid; in pvrdma_poll_one()
/drivers/infiniband/hw/mlx4/
cq.c
600 wc->slid = 0; in use_tunnel_data()
606 wc->slid = be16_to_cpu(hdr->tun.slid_mac_47_32); in use_tunnel_data()
857 wc->slid = 0; in mlx4_ib_poll_one()
869 wc->slid = be16_to_cpu(cqe->rlid); in mlx4_ib_poll_one()
mad.c
172 in_modifier |= ib_lid_cpu16(in_wc->slid) << 16; in mlx4_MAD_IFC()
628 tun_mad->hdr.slid_mac_47_32 = ib_lid_be16(wc->slid); in mlx4_ib_send_to_slave()
806 u16 slid, prev_lid = 0; in ib_process_mad() local
813 in_wc->slid, in_wc->src_qp, in ib_process_mad()
829 slid = in_wc ? ib_lid_cpu16(in_wc->slid) : be16_to_cpu(IB_LID_PERMISSIVE); in ib_process_mad()
831 if (in_mad->mad_hdr.method == IB_MGMT_METHOD_TRAP && slid == 0) { in ib_process_mad()
/drivers/infiniband/sw/rdmavt/
cq.c
110 wc->uqueue[head].slid = ib_lid_cpu16(entry->slid); in rvt_cq_enter()
/drivers/infiniband/hw/qib/
qib_ud.c
212 wc.slid = ppd->lid | (rdma_ah_get_path_bits(ah_attr) & in qib_ud_loopback()
572 wc.slid = be16_to_cpu(hdr->lrh[3]); in qib_ud_rcv()
qib_uc.c
397 wc.slid = rdma_ah_get_dlid(&qp->remote_ah_attr); in qib_uc_rcv()
/drivers/infiniband/ulp/ipoib/
ipoib_ib.c
217 wc->byte_len, wc->slid); in ipoib_ib_handle_rx_wc()
239 if (wc->slid == priv->local_lid && wc->src_qp == priv->qp->qp_num) { in ipoib_ib_handle_rx_wc()
