/drivers/infiniband/core/

ud_header.c
     47  { STRUCT_FIELD(lrh, virtual_lane),
     51  { STRUCT_FIELD(lrh, link_version),
     55  { STRUCT_FIELD(lrh, service_level),
     63  { STRUCT_FIELD(lrh, link_next_header),
     67  { STRUCT_FIELD(lrh, destination_lid),
     75  { STRUCT_FIELD(lrh, packet_length),
     79  { STRUCT_FIELD(lrh, source_lid),
    239  header->lrh.link_version = 0;  in ib_ud_header_init()
    240  header->lrh.link_next_header =  in ib_ud_header_init()
    249  header->lrh.packet_length = cpu_to_be16(packet_length);  in ib_ud_header_init()
    [all …]
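The STRUCT_FIELD() entries above describe where each unpacked LRH field lands in the 8-byte wire header that ib_ud_header_init() fills in (link version 0, LNH chosen by whether a GRH is present, packet length counted in 4-byte words). As a rough illustration of that layout only, here is a minimal userland sketch; struct unpacked_lrh and pack_lrh() are invented for the example and are not the kernel's ib_pack() machinery:

#include <stdint.h>
#include <string.h>
#include <arpa/inet.h>		/* htons() */

struct unpacked_lrh {			/* mirrors the fields listed above */
	uint8_t  virtual_lane;		/* 4 bits on the wire */
	uint8_t  link_version;		/* 4 bits, currently 0 */
	uint8_t  service_level;		/* 4 bits */
	uint8_t  link_next_header;	/* 2 bits: 2 = BTH follows, 3 = GRH follows */
	uint16_t destination_lid;
	uint16_t packet_length;		/* 11 bits, counted in 4-byte words */
	uint16_t source_lid;
};

static void pack_lrh(const struct unpacked_lrh *l, uint8_t buf[8])
{
	uint16_t w[4];
	int i;

	w[0] = (l->virtual_lane << 12) | (l->link_version << 8) |
	       (l->service_level << 4) | (l->link_next_header & 3);
	w[1] = l->destination_lid;
	w[2] = l->packet_length & 0x7ff;	/* PktLen is an 11-bit field */
	w[3] = l->source_lid;

	for (i = 0; i < 4; i++)
		w[i] = htons(w[i]);		/* every LRH field is big-endian on the wire */
	memcpy(buf, w, sizeof(w));
}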
/drivers/infiniband/hw/qib/

qib_ud.c
    352  qp->s_hdr->lrh[0] = cpu_to_be16(lrh0);  in qib_make_ud_req()
    353  qp->s_hdr->lrh[1] = cpu_to_be16(ah_attr->dlid);  /* DEST LID */  in qib_make_ud_req()
    354  qp->s_hdr->lrh[2] = cpu_to_be16(qp->s_hdrwords + nwords + SIZE_OF_CRC);  in qib_make_ud_req()
    358  qp->s_hdr->lrh[3] = cpu_to_be16(lid);  in qib_make_ud_req()
    360  qp->s_hdr->lrh[3] = IB_LID_PERMISSIVE;  in qib_make_ud_req()
    466  if (unlikely(hdr->lrh[1] == IB_LID_PERMISSIVE ||  in qib_ud_rcv()
    467  hdr->lrh[3] == IB_LID_PERMISSIVE))  in qib_ud_rcv()
    477  (be16_to_cpu(hdr->lrh[0]) >> 4) &  in qib_ud_rcv()
    480  hdr->lrh[3], hdr->lrh[1]);  in qib_ud_rcv()
    486  (be16_to_cpu(hdr->lrh[0]) >> 4) & 0xF,  in qib_ud_rcv()
    [all …]
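Lines 352-360 show qib_make_ud_req() filling the four big-endian LRH words of a UD send: lrh[0] packs VL, link version, SL and LNH; lrh[1] takes the destination LID from the address handle; lrh[2] is the packet length in 32-bit words including the ICRC; lrh[3] is the source LID, or the permissive LID while the port has none. A minimal userland sketch of the same packing; build_lrh() and the LNH_*/LID_PERMISSIVE constants are invented for the example (the driver uses QIB_LRH_BTH/QIB_LRH_GRH and IB_LID_PERMISSIVE):

#include <stdint.h>
#include <arpa/inet.h>

#define LNH_IBA_LOCAL   2	/* next header is the BTH, no GRH (assumed value) */
#define LNH_IBA_GLOBAL  3	/* a GRH follows the LRH */
#define LID_PERMISSIVE  0xffff	/* stand-in for IB_LID_PERMISSIVE */

static void build_lrh(uint16_t lrh[4], uint8_t vl, uint8_t sl, int has_grh,
		      uint16_t dlid, uint16_t slid, uint16_t len_dwords)
{
	/* Word 0: VL in bits 15:12, LVer (0) in 11:8, SL in 7:4, LNH in 1:0 */
	uint16_t lrh0 = (vl << 12) | (sl << 4) |
			(has_grh ? LNH_IBA_GLOBAL : LNH_IBA_LOCAL);

	lrh[0] = htons(lrh0);
	lrh[1] = htons(dlid);		/* destination LID from the address handle */
	lrh[2] = htons(len_dwords);	/* headers + payload + ICRC, in 32-bit words */
	lrh[3] = htons(slid);		/* port LID | src_path_bits, or LID_PERMISSIVE
					   while the port has no LID assigned */
}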
qib_ruc.c
    292  (be16_to_cpu(hdr->lrh[0]) >> 4) & 0xF,  in qib_ruc_check_hdr()
    294  hdr->lrh[3], hdr->lrh[1]);  in qib_ruc_check_hdr()
    298  if (be16_to_cpu(hdr->lrh[3]) != qp->alt_ah_attr.dlid ||  in qib_ruc_check_hdr()
    324  (be16_to_cpu(hdr->lrh[0]) >> 4) & 0xF,  in qib_ruc_check_hdr()
    326  hdr->lrh[3], hdr->lrh[1]);  in qib_ruc_check_hdr()
    330  if (be16_to_cpu(hdr->lrh[3]) != qp->remote_ah_attr.dlid ||  in qib_ruc_check_hdr()
    694  qp->s_hdr->lrh[0] = cpu_to_be16(lrh0);  in qib_make_ruc_header()
    695  qp->s_hdr->lrh[1] = cpu_to_be16(qp->remote_ah_attr.dlid);  in qib_make_ruc_header()
    696  qp->s_hdr->lrh[2] = cpu_to_be16(qp->s_hdrwords + nwords + SIZE_OF_CRC);  in qib_make_ruc_header()
    697  qp->s_hdr->lrh[3] = cpu_to_be16(ppd_from_ibp(ibp)->lid |  in qib_make_ruc_header()

qib_driver.c
    311  u16 lid = be16_to_cpu(hdr->lrh[1]);  in qib_rcv_hdrerr()
    312  int lnh = be16_to_cpu(hdr->lrh[0]) & 3;  in qib_rcv_hdrerr()
    493  u16 lrh_len = be16_to_cpu(hdr->lrh[2]) << 2;  in qib_kreceive()
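qib_rcv_hdrerr() and qib_kreceive() above show the receive side pulling fields straight out of the big-endian words: the DLID from lrh[1], the LNH from the low two bits of lrh[0], and the byte length from lrh[2] shifted left by two. Together with the VL and SL extractions seen in qib_ud.c, qib_ruc.c and qib_verbs.c, that gives the following sketch; parse_lrh() and struct lrh_fields are invented for illustration:

#include <stdint.h>
#include <arpa/inet.h>		/* ntohs() */

struct lrh_fields {
	uint8_t  vl, sl, lnh;
	uint16_t dlid, slid;
	uint32_t len_bytes;
};

static struct lrh_fields parse_lrh(const uint16_t lrh[4])
{
	uint16_t w0 = ntohs(lrh[0]);
	struct lrh_fields f = {
		.vl        = w0 >> 12,			/* VL 15 marks management packets */
		.sl        = (w0 >> 4) & 0xF,
		.lnh       = w0 & 3,			/* 2: BTH follows, 3: GRH follows */
		.dlid      = ntohs(lrh[1]),
		.slid      = ntohs(lrh[3]),
		.len_bytes = ntohs(lrh[2]) << 2,	/* PktLen counts 4-byte words */
	};
	return f;
}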
qib_user_sdma.c
    452  hdr->lrh[2] = cpu_to_be16(le16_to_cpu(pbc16[0]));  in qib_user_sdma_page_to_frags()
    470  be16_to_cpu(hdr->lrh[2]) -  in qib_user_sdma_page_to_frags()
    498  hdr->lrh[2] = cpu_to_be16(le16_to_cpu(pbc16[0]));  in qib_user_sdma_page_to_frags()
    514  be16_to_cpu(hdr->lrh[2]) -  in qib_user_sdma_page_to_frags()
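The user SDMA path rebuilds lrh[2] from the first half-word of the PBC that user space prepared; the PBC is little-endian while the LRH on the wire is big-endian, hence the back-to-back conversions at lines 452 and 498. A one-line userland sketch of that conversion, assuming glibc's <endian.h> helpers; pbc_len_to_lrh2() is a made-up name:

#include <stdint.h>
#include <endian.h>	/* le16toh(), htobe16() (glibc) */

static inline uint16_t pbc_len_to_lrh2(uint16_t pbc16_0)
{
	/* pbc16_0 is the little-endian PBC half-word carrying the length;
	 * the result is the big-endian LRH packet-length word. */
	return htobe16(le16toh(pbc16_0));
}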
qib_common.h
    700  __be16 lrh[4];  member

qib_rc.c
    696  hdr.lrh[0] = cpu_to_be16(lrh0);  in qib_send_rc_ack()
    697  hdr.lrh[1] = cpu_to_be16(qp->remote_ah_attr.dlid);  in qib_send_rc_ack()
    698  hdr.lrh[2] = cpu_to_be16(hwords + SIZE_OF_CRC);  in qib_send_rc_ack()
    699  hdr.lrh[3] = cpu_to_be16(ppd->lid | qp->remote_ah_attr.src_path_bits);  in qib_send_rc_ack()
    982  if ((be16_to_cpu(hdr->lrh[0]) & 3) == QIB_LRH_BTH)  in qib_rc_send_complete()

qib_verbs.c
    626  lid = be16_to_cpu(hdr->lrh[1]);  in qib_ib_rcv()
    634  lnh = be16_to_cpu(hdr->lrh[0]) & 3;  in qib_ib_rcv()
   1162  be16_to_cpu(hdr->lrh[0]) >> 12);  in qib_verbs_send_dma()
   1286  be16_to_cpu(ibhdr->lrh[0]) >> 12);  in qib_verbs_send_pio()

qib_verbs.h
    190  __be16 lrh[4];  member
qib_iba7322.c
   1432  .lrh[0] = cpu_to_be16(0xF000 | QIB_LRH_BTH),  in flush_fifo()
   1433  .lrh[1] = IB_LID_PERMISSIVE,  in flush_fifo()
   1434  .lrh[2] = cpu_to_be16(hdrwords + SIZE_OF_CRC),  in flush_fifo()
   1435  .lrh[3] = IB_LID_PERMISSIVE,  in flush_fifo()
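flush_fifo() in qib_iba7322.c appears to push a throwaway packet purely to drain the send FIFO, so its header is built from literals: VL 15 with LNH = BTH in lrh[0], permissive LIDs in lrh[1] and lrh[3], and just the header words plus the ICRC in lrh[2]. A tiny standalone sketch decoding that lrh[0] literal, assuming QIB_LRH_BTH is the LNH code 2:

#include <stdio.h>
#include <stdint.h>

int main(void)
{
	uint16_t lrh0 = 0xF000 | 2;	/* 0xF000 | QIB_LRH_BTH, assuming QIB_LRH_BTH == 2 */

	printf("VL  = %d\n", lrh0 >> 12);		/* 15: the management virtual lane */
	printf("SL  = %d\n", (lrh0 >> 4) & 0xF);	/* 0 */
	printf("LNH = %d\n", lrh0 & 3);			/* 2: a BTH follows, no GRH */
	return 0;
}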
/drivers/infiniband/hw/ipath/

ipath_ud.c
    352  qp->s_hdr.lrh[0] = cpu_to_be16(lrh0);  in ipath_make_ud_req()
    353  qp->s_hdr.lrh[1] = cpu_to_be16(ah_attr->dlid);  /* DEST LID */  in ipath_make_ud_req()
    354  qp->s_hdr.lrh[2] = cpu_to_be16(qp->s_hdrwords + nwords +  in ipath_make_ud_req()
    360  qp->s_hdr.lrh[3] = cpu_to_be16(lid);  in ipath_make_ud_req()
    362  qp->s_hdr.lrh[3] = IB_LID_PERMISSIVE;  in ipath_make_ud_req()
    454  if (unlikely(hdr->lrh[1] == IB_LID_PERMISSIVE ||  in ipath_ud_rcv()
    455  hdr->lrh[3] == IB_LID_PERMISSIVE)) {  in ipath_ud_rcv()
    465  } else if (hdr->lrh[1] == IB_LID_PERMISSIVE ||  in ipath_ud_rcv()
    466  hdr->lrh[3] == IB_LID_PERMISSIVE) {  in ipath_ud_rcv()
    509  (be16_to_cpu(hdr->lrh[0]) >> 12) != 15)) ||  in ipath_ud_rcv()
    [all …]

ipath_ruc.c
    619  qp->s_hdr.lrh[0] = cpu_to_be16(lrh0);  in ipath_make_ruc_header()
    620  qp->s_hdr.lrh[1] = cpu_to_be16(qp->remote_ah_attr.dlid);  in ipath_make_ruc_header()
    621  qp->s_hdr.lrh[2] = cpu_to_be16(qp->s_hdrwords + nwords + SIZE_OF_CRC);  in ipath_make_ruc_header()
    622  qp->s_hdr.lrh[3] = cpu_to_be16(dev->dd->ipath_lid |  in ipath_make_ruc_header()

ipath_common.h
    743  __be16 lrh[4];  member
    752  __be16 lrh[4];  member

ipath_rc.c
    674  hdr.lrh[0] = cpu_to_be16(lrh0);  in send_rc_ack()
    675  hdr.lrh[1] = cpu_to_be16(qp->remote_ah_attr.dlid);  in send_rc_ack()
    676  hdr.lrh[2] = cpu_to_be16(hwords + SIZE_OF_CRC);  in send_rc_ack()
    677  hdr.lrh[3] = cpu_to_be16(dd->ipath_lid |  in send_rc_ack()
   1586  if (unlikely(be16_to_cpu(hdr->lrh[3]) != qp->remote_ah_attr.dlid))  in ipath_rc_rcv()

ipath_verbs.c
    603  lid = be16_to_cpu(hdr->lrh[1]);  in ipath_ib_rcv()
    613  lnh = be16_to_cpu(hdr->lrh[0]) & 3;  in ipath_ib_rcv()
   1148  if ((be16_to_cpu(hdr->lrh[0]) >> 12) == 15) {  in ipath_verbs_send_dma()
   1250  if ((be16_to_cpu(ibhdr->lrh[0]) >> 12) == 15)  in ipath_verbs_send_pio()

ipath_uc.c
    254  if (unlikely(be16_to_cpu(hdr->lrh[3]) != qp->remote_ah_attr.dlid))  in ipath_uc_rcv()

ipath_verbs.h
    136  __be16 lrh[4];  member
/drivers/infiniband/hw/mthca/

mthca_av.c
    272  header->lrh.service_level = be32_to_cpu(ah->av->sl_tclass_flowlabel) >> 28;  in mthca_read_ah()
    273  header->lrh.destination_lid = ah->av->dlid;  in mthca_read_ah()
    274  header->lrh.source_lid = cpu_to_be16(ah->av->g_slid & 0x7f);  in mthca_read_ah()

mthca_qp.c
   1496  (sqp->ud_header.lrh.destination_lid ==  in build_mlx_header()
   1498  (sqp->ud_header.lrh.service_level << 8));  in build_mlx_header()
   1499  mlx->rlid = sqp->ud_header.lrh.destination_lid;  in build_mlx_header()
   1516  sqp->ud_header.lrh.virtual_lane = !sqp->qp.ibqp.qp_num ? 15 : 0;  in build_mlx_header()
   1517  if (sqp->ud_header.lrh.destination_lid == IB_LID_PERMISSIVE)  in build_mlx_header()
   1518  sqp->ud_header.lrh.source_lid = IB_LID_PERMISSIVE;  in build_mlx_header()
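The mthca lines above (and the mlx4 ones below) apply the special-QP rules while building the inline MLX header: QP0 traffic is tagged with virtual lane 15, the other special-QP traffic here with VL 0, and a permissive destination LID forces the source LID to be permissive as well. A minimal sketch of just that rule; struct sketch_lrh and fixup_special_qp_lrh() are invented names, not driver functions:

#include <stdint.h>

#define LID_PERMISSIVE 0xffff	/* stand-in for IB_LID_PERMISSIVE */

struct sketch_lrh {
	uint8_t  virtual_lane;
	uint16_t destination_lid;
	uint16_t source_lid;
};

/* QP0 (SMI) goes out on VL 15, everything else here on VL 0; a permissive
 * destination LID implies a permissive source LID, as at mthca_qp.c:1517. */
static void fixup_special_qp_lrh(struct sketch_lrh *lrh, uint32_t qp_num)
{
	lrh->virtual_lane = (qp_num == 0) ? 15 : 0;
	if (lrh->destination_lid == LID_PERMISSIVE)
		lrh->source_lid = LID_PERMISSIVE;
}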
/drivers/infiniband/hw/mlx4/

qp.c
   2015  sqp->ud_header.lrh.service_level =  in build_sriov_qp0_header()
   2017  sqp->ud_header.lrh.destination_lid =  in build_sriov_qp0_header()
   2019  sqp->ud_header.lrh.source_lid =  in build_sriov_qp0_header()
   2027  mlx->rlid = sqp->ud_header.lrh.destination_lid;  in build_sriov_qp0_header()
   2029  sqp->ud_header.lrh.virtual_lane = 0;  in build_sriov_qp0_header()
   2158  sqp->ud_header.lrh.service_level =  in build_mlx_header()
   2160  sqp->ud_header.lrh.destination_lid = ah->av.ib.dlid;  in build_mlx_header()
   2161  sqp->ud_header.lrh.source_lid = cpu_to_be16(ah->av.ib.g_slid & 0x7f);  in build_mlx_header()
   2197  (sqp->ud_header.lrh.destination_lid ==  in build_mlx_header()
   2199  (sqp->ud_header.lrh.service_level << 8));  in build_mlx_header()
    [all …]