/drivers/net/ethernet/chelsio/inline_crypto/ch_ktls/
  chcr_ktls.c
    180   struct cpl_act_open_req *cpl;  in chcr_ktls_act_open_req() [local]
    194   cpl = (struct cpl_act_open_req *)cpl6;  in chcr_ktls_act_open_req()
    198   OPCODE_TID(cpl) = htonl(MK_OPCODE_TID(CPL_ACT_OPEN_REQ, qid_atid));  in chcr_ktls_act_open_req()
    199   cpl->local_port = inet->inet_sport;  in chcr_ktls_act_open_req()
    200   cpl->peer_port = inet->inet_dport;  in chcr_ktls_act_open_req()
    201   cpl->local_ip = inet->inet_rcv_saddr;  in chcr_ktls_act_open_req()
    202   cpl->peer_ip = inet->inet_daddr;  in chcr_ktls_act_open_req()
    207   cpl->opt0 = cpu_to_be64(options);  in chcr_ktls_act_open_req()
    212   cpl->opt2 = htonl(options);  in chcr_ktls_act_open_req()
    231   struct cpl_act_open_req6 *cpl;  in chcr_ktls_act_open_req6() [local]
    [all …]
/drivers/target/iscsi/cxgbit/
  cxgbit_main.c
    195   struct cpl_rx_iscsi_ddp *cpl = (struct cpl_rx_iscsi_ddp *)(rsp + 1);  in cxgbit_lro_add_packet_rsp() [local]
    197   cxgbit_process_ddpvld(lro_cb->csk, pdu_cb, be32_to_cpu(cpl->ddpvld));  in cxgbit_lro_add_packet_rsp()
    200   pdu_cb->ddigest = ntohl(cpl->ulp_crc);  in cxgbit_lro_add_packet_rsp()
    201   pdu_cb->pdulen = ntohs(cpl->len);  in cxgbit_lro_add_packet_rsp()
    243   struct cpl_iscsi_hdr *cpl = (struct cpl_iscsi_hdr *)gl->va;  in cxgbit_lro_add_packet_gl() [local]
    247   pdu_cb->seq = ntohl(cpl->seq);  in cxgbit_lro_add_packet_gl()
    248   len = ntohs(cpl->len);  in cxgbit_lro_add_packet_gl()
    258   struct cpl_iscsi_data *cpl = (struct cpl_iscsi_data *)gl->va;  in cxgbit_lro_add_packet_gl() [local]
    262   len = ntohs(cpl->len);  in cxgbit_lro_add_packet_gl()
    269   struct cpl_rx_iscsi_cmp *cpl;  in cxgbit_lro_add_packet_gl() [local]
    [all …]
  cxgbit_target.c
    130   struct cpl_tx_data_iso *cpl;  in cxgbit_cpl_tx_data_iso() [local]
    135   cpl = __skb_push(skb, sizeof(*cpl));  in cxgbit_cpl_tx_data_iso()
    137   cpl->op_to_scsi = htonl(CPL_TX_DATA_ISO_OP_V(CPL_TX_DATA_ISO) |  in cxgbit_cpl_tx_data_iso()
    146   cpl->ahs_len = 0;  in cxgbit_cpl_tx_data_iso()
    147   cpl->mpdu = htons(DIV_ROUND_UP(iso_info->mpdu, 4));  in cxgbit_cpl_tx_data_iso()
    148   cpl->burst_size = htonl(DIV_ROUND_UP(iso_info->burst_len, 4));  in cxgbit_cpl_tx_data_iso()
    149   cpl->len = htonl(iso_info->len);  in cxgbit_cpl_tx_data_iso()
    150   cpl->reserved2_seglen_offset = htonl(0);  in cxgbit_cpl_tx_data_iso()
    151   cpl->datasn_offset = htonl(0);  in cxgbit_cpl_tx_data_iso()
    152   cpl->buffer_offset = htonl(0);  in cxgbit_cpl_tx_data_iso()
    [all …]
  cxgbit_cm.c
    1923  struct cpl_rx_data *cpl = cplhdr(skb);  in cxgbit_rx_data() [local]
    1924  unsigned int tid = GET_TID(cpl);  in cxgbit_rx_data()
    1964  struct cpl_tx_data *cpl = cplhdr(skb);  in cxgbit_rx_cpl() [local]
    1967  unsigned int tid = GET_TID(cpl);  in cxgbit_rx_cpl()
/drivers/net/ethernet/chelsio/cxgb4/
  sge.c
    1512  struct cpl_tx_pkt_core *cpl;  in cxgb4_eth_xmit() [local]
    1617  len += sizeof(*cpl);  in cxgb4_eth_xmit()
    1633  cpl = (void *)(tnl_lso + 1);  in cxgb4_eth_xmit()
    1645  cpl = write_tso_wr(adap, skb, lso);  in cxgb4_eth_xmit()
    1648  sgl = (u64 *)(cpl + 1); /* sgl start here */  in cxgb4_eth_xmit()
    1659  cpl = write_eo_udp_wr(skb, eowr, hdrlen);  in cxgb4_eth_xmit()
    1662  start = (u64 *)(cpl + 1);  in cxgb4_eth_xmit()
    1679  cpl = (void *)(wr + 1);  in cxgb4_eth_xmit()
    1680  sgl = (u64 *)(cpl + 1);  in cxgb4_eth_xmit()
    1718  cpl->ctrl0 = htonl(ctrl0);  in cxgb4_eth_xmit()
    [all …]
  cxgb4_uld.h
    70    #define INIT_TP_WR_CPL(w, cpl, tid) do { \  [argument]
    72    OPCODE_TID(w) = htonl(MK_OPCODE_TID(cpl, tid)); \
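Note on the INIT_TP_WR_CPL()/OPCODE_TID() hits above: every CPL request carries its opcode and connection tid packed into one 32-bit word by MK_OPCODE_TID(). As a hedged sketch only (the authoritative macros live in the cxgb4 message headers; the 8-bit-opcode over 24-bit-tid split is assumed from the usual Chelsio layout, and the opcode/tid values below are placeholders):

    #include <stdint.h>
    #include <stdio.h>
    #include <arpa/inet.h>

    #define CPL_OPCODE_SHIFT 24u

    /* Illustrative stand-in for MK_OPCODE_TID(). */
    static uint32_t mk_opcode_tid(uint8_t opcode, uint32_t tid)
    {
            return ((uint32_t)opcode << CPL_OPCODE_SHIFT) | (tid & 0x00ffffffu);
    }

    int main(void)
    {
            /* htonl() mirrors the OPCODE_TID(w) = htonl(...) stores seen above. */
            uint32_t ot = htonl(mk_opcode_tid(0x21, 0x1234));

            printf("opcode_tid on the wire: 0x%08x\n", ot);
            return 0;
    }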
/drivers/scsi/cxgbi/cxgb4i/
  cxgb4i.c
    678   cxgb4i_make_tx_iso_cpl(struct sk_buff *skb, struct cpl_tx_data_iso *cpl)  in cxgb4i_make_tx_iso_cpl() [argument]
    687   cpl->op_to_scsi = cpu_to_be32(CPL_TX_DATA_ISO_OP_V(CPL_TX_DATA_ISO) |  in cxgb4i_make_tx_iso_cpl()
    696   cpl->ahs_len = info->ahs;  in cxgb4i_make_tx_iso_cpl()
    697   cpl->mpdu = cpu_to_be16(DIV_ROUND_UP(info->mpdu, 4));  in cxgb4i_make_tx_iso_cpl()
    698   cpl->burst_size = cpu_to_be32(info->burst_size);  in cxgb4i_make_tx_iso_cpl()
    699   cpl->len = cpu_to_be32(info->len);  in cxgb4i_make_tx_iso_cpl()
    700   cpl->reserved2_seglen_offset =  in cxgb4i_make_tx_iso_cpl()
    702   cpl->datasn_offset = cpu_to_be32(info->datasn_offset);  in cxgb4i_make_tx_iso_cpl()
    703   cpl->buffer_offset = cpu_to_be32(info->buffer_offset);  in cxgb4i_make_tx_iso_cpl()
    704   cpl->reserved3 = cpu_to_be32(0);  in cxgb4i_make_tx_iso_cpl()
    [all …]
/drivers/dma/qcom/
  qcom_adm.c
    127   void *cpl;  [member]
    415   async_desc->cpl = kzalloc(async_desc->dma_len, GFP_NOWAIT);  in adm_prep_slave_sg()
    416   if (!async_desc->cpl)  in adm_prep_slave_sg()
    422   cple = PTR_ALIGN(async_desc->cpl, ADM_DESC_ALIGN);  in adm_prep_slave_sg()
    436   async_desc->dma_addr = dma_map_single(adev->dev, async_desc->cpl,  in adm_prep_slave_sg()
    442   cple_addr = async_desc->dma_addr + ((void *)cple - async_desc->cpl);  in adm_prep_slave_sg()
    683   kfree(async_desc->cpl);  in adm_dma_free_desc()
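In the qcom_adm.c hits, cpl is not a Chelsio message at all but the CPU-side command-list buffer of the ADM DMA engine: it is allocated with slack, aligned via PTR_ALIGN(), DMA-mapped, and the aligned region's bus address is recovered by byte offset. A minimal userspace sketch of that arithmetic follows; align_up(), the fake bus address, and the buffer size are assumptions for illustration only:

    #include <stdint.h>
    #include <stdlib.h>
    #include <stdio.h>

    #define DESC_ALIGN 16u

    /* Round a pointer value up to a power-of-two boundary (PTR_ALIGN analogue). */
    static uintptr_t align_up(uintptr_t p, uintptr_t a)
    {
            return (p + a - 1) & ~(a - 1);
    }

    int main(void)
    {
            size_t len = 256 + DESC_ALIGN;          /* room to realign, like dma_len */
            void *cpl = calloc(1, len);             /* kzalloc() stand-in */
            if (!cpl)
                    return 1;

            void *cple = (void *)align_up((uintptr_t)cpl, DESC_ALIGN);
            uint64_t dma_addr = 0x80000000ull;      /* pretend dma_map_single() result */
            uint64_t cple_addr = dma_addr + (uint64_t)((char *)cple - (char *)cpl);

            printf("cpu %p -> aligned %p, bus offset %llu\n",
                   cpl, cple, (unsigned long long)(cple_addr - dma_addr));
            free(cpl);
            return 0;
    }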
/drivers/net/ethernet/chelsio/inline_crypto/ch_ipsec/
  chcr_ipsec.c
    464   struct cpl_tx_pkt_core *cpl;  in copy_cpltx_pktxt() [local]
    481   cpl = (struct cpl_tx_pkt_core *)pos;  in copy_cpltx_pktxt()
    491   cpl->ctrl0 = htonl(ctrl0);  in copy_cpltx_pktxt()
    492   cpl->pack = htons(0);  in copy_cpltx_pktxt()
    493   cpl->len = htons(skb->len);  in copy_cpltx_pktxt()
    494   cpl->ctrl1 = cpu_to_be64(cntrl);  in copy_cpltx_pktxt()
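The ch_ipsec hit above, like the cxgb4 and cxgb4vf sge.c transmit paths, finishes the work request by filling the cpl_tx_pkt_core header that precedes the gather list: ctrl0, pack, len, ctrl1. Below is a self-contained sketch of that fill pattern; the struct is a simplified mirror of just the fields the snippets touch, and the ctrl0/ctrl1 values are opaque placeholders rather than real hardware encodings:

    #include <stdint.h>
    #include <arpa/inet.h>

    /* Simplified stand-in for struct cpl_tx_pkt_core, fields as used above. */
    struct tx_pkt_core_sketch {
            uint32_t ctrl0;   /* opcode / interface / queue selection bits */
            uint16_t pack;
            uint16_t len;     /* total packet length */
            uint64_t ctrl1;   /* checksum and VLAN control bits */
    };

    static void fill_tx_pkt_core(struct tx_pkt_core_sketch *cpl,
                                 uint32_t ctrl0, uint64_t cntrl_be64,
                                 uint16_t pkt_len)
    {
            cpl->ctrl0 = htonl(ctrl0);
            cpl->pack  = htons(0);
            cpl->len   = htons(pkt_len);
            cpl->ctrl1 = cntrl_be64;  /* caller supplies an already big-endian value */
    }

    int main(void)
    {
            /* In the drivers this header lands right after the WR, before the SGL. */
            struct tx_pkt_core_sketch hdr;

            fill_tx_pkt_core(&hdr, 0x12345678u, 0, 1500);
            (void)hdr;
            return 0;
    }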
/drivers/net/ethernet/chelsio/cxgb4vf/
  sge.c
    1167  struct cpl_tx_pkt_core *cpl;  in t4vf_eth_xmit() [local]
    1288  sizeof(*cpl)));  in t4vf_eth_xmit()
    1312  cpl = (void *)(lso + 1);  in t4vf_eth_xmit()
    1327  len = is_eth_imm(skb) ? skb->len + sizeof(*cpl) : sizeof(*cpl);  in t4vf_eth_xmit()
    1336  cpl = (void *)(wr + 1);  in t4vf_eth_xmit()
    1357  cpl->ctrl0 = cpu_to_be32(TXPKT_OPCODE_V(CPL_TX_PKT_XT) |  in t4vf_eth_xmit()
    1360  cpl->pack = cpu_to_be16(0);  in t4vf_eth_xmit()
    1361  cpl->len = cpu_to_be16(skb->len);  in t4vf_eth_xmit()
    1362  cpl->ctrl1 = cpu_to_be64(cntrl);  in t4vf_eth_xmit()
    1379  inline_tx_skb(skb, &txq->q, cpl + 1);  in t4vf_eth_xmit()
    [all …]
  cxgb4vf_main.c
    495   void *cpl = (void *)(rsp + 1);  in fwevtq_handler() [local]
    502   const struct cpl_fw6_msg *fw_msg = cpl;  in fwevtq_handler()
    518   cpl = (void *)p;  in fwevtq_handler()
    532   const struct cpl_sge_egr_update *p = cpl;  in fwevtq_handler()
/drivers/net/ethernet/chelsio/cxgb/
  sge.c
    1822  struct cpl_tx_pkt *cpl;  in t1_start_xmit() [local]
    1858  cpl = (struct cpl_tx_pkt *)hdr;  in t1_start_xmit()
    1898  cpl = __skb_push(skb, sizeof(*cpl));  in t1_start_xmit()
    1899  cpl->opcode = CPL_TX_PKT;  in t1_start_xmit()
    1900  cpl->ip_csum_dis = 1; /* SW calculates IP csum */  in t1_start_xmit()
    1901  cpl->l4_csum_dis = skb->ip_summed == CHECKSUM_PARTIAL ? 0 : 1;  in t1_start_xmit()
    1906  cpl->iff = dev->if_port;  in t1_start_xmit()
    1909  cpl->vlan_valid = 1;  in t1_start_xmit()
    1910  cpl->vlan = htons(skb_vlan_tag_get(skb));  in t1_start_xmit()
    1913  cpl->vlan_valid = 0;  in t1_start_xmit()
/drivers/net/ethernet/chelsio/cxgb3/
  sge.c
    1187  struct cpl_tx_pkt *cpl = (struct cpl_tx_pkt *)d;  in write_tx_pkt_wr() [local]
    1189  cpl->len = htonl(skb->len);  in write_tx_pkt_wr()
    1198  struct cpl_tx_pkt_lso *hdr = (struct cpl_tx_pkt_lso *)cpl;  in write_tx_pkt_wr()
    1214  cpl->cntrl = htonl(cntrl);  in write_tx_pkt_wr()
    1216  if (skb->len <= WR_LEN - sizeof(*cpl)) {  in write_tx_pkt_wr()
    1225  cpl->wr.wr_hi = htonl(V_WR_BCNTLFLT(skb->len & 7) |  in write_tx_pkt_wr()
    1229  cpl->wr.wr_lo = htonl(V_WR_LEN(flits) | V_WR_GEN(gen) |  in write_tx_pkt_wr()
    2142  struct cpl_rx_pkt *cpl;  in lro_add_page() [local]
    2175  cpl = qs->lro_va = sd->pg_chunk.va + 2;  in lro_add_page()
    2178  cpl->csum_valid && cpl->csum == htons(0xffff)) {  in lro_add_page()
    [all …]
/drivers/gpu/drm/pl111/
  pl111_display.c
    133   u32 cpl, tim2;  in pl111_display_enable() [local]
    155   cpl = mode->hdisplay - 1;  in pl111_display_enable()
    228   tim2 |= cpl << 16;  in pl111_display_enable()
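In pl111_display.c, cpl is the CLCD "clocks per line" timing field: the active horizontal resolution minus one, shifted into the TIM2 register. The shift of 16 comes straight from the snippet; reading it as the TIM2 CPL field follows the PL111 documentation. A tiny sketch:

    #include <stdint.h>

    #define TIM2_CPL_SHIFT 16u

    /* e.g. a 1024-pixel-wide mode programs cpl = 1023 */
    static uint32_t tim2_with_cpl(uint32_t tim2, uint32_t hdisplay)
    {
            uint32_t cpl = hdisplay - 1;

            return tim2 | (cpl << TIM2_CPL_SHIFT);
    }

    int main(void)
    {
            return tim2_with_cpl(0, 1024) == (1023u << TIM2_CPL_SHIFT) ? 0 : 1;
    }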
/drivers/infiniband/hw/cxgb4/
  cm.c
    518   int cpl)  in queue_arp_failure_cpl() [argument]
    523   rpl->ot.opcode = cpl;  in queue_arp_failure_cpl()
    3806  struct cpl_pass_accept_req *cpl;  in passive_ofld_conn_reply() [local]
    3817  cpl = (struct cpl_pass_accept_req *)cplhdr(rpl_skb);  in passive_ofld_conn_reply()
    3818  OPCODE_TID(cpl) = htonl(MK_OPCODE_TID(CPL_PASS_ACCEPT_REQ,  in passive_ofld_conn_reply()
    3937  struct cpl_rx_pkt *cpl = cplhdr(skb);  in build_cpl_pass_accept_req() [local]
    3945  vlantag = cpl->vlan;  in build_cpl_pass_accept_req()
    3946  len = cpl->len;  in build_cpl_pass_accept_req()
    3947  l2info = cpl->l2info;  in build_cpl_pass_accept_req()
    3948  hdr_len = cpl->hdr_len;  in build_cpl_pass_accept_req()
    [all …]
/drivers/watchdog/
  octeon-wdt-main.c
    130   static irqreturn_t octeon_wdt_poke_irq(int cpl, void *dev_id)  in octeon_wdt_poke_irq() [argument]
    143   disable_irq_nosync(cpl);  in octeon_wdt_poke_irq()
/drivers/input/serio/
  ct82c710.c
    57    static irqreturn_t ct82c710_interrupt(int cpl, void *dev_id)  in ct82c710_interrupt() [argument]
/drivers/staging/octeon/
  ethernet-spi.c
    106   static irqreturn_t cvm_oct_spi_rml_interrupt(int cpl, void *dev_id)  in cvm_oct_spi_rml_interrupt() [argument]
  ethernet-tx.c
    688   static irqreturn_t cvm_oct_tx_cleanup_watchdog(int cpl, void *dev_id)  in cvm_oct_tx_cleanup_watchdog() [argument]
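In the watchdog, serio, staging/octeon, pcmcia, and octeon_mgmt hits, cpl has nothing to do with Chelsio CPL messages: it is simply the name these drivers give to the first argument of a Linux interrupt handler, i.e. the IRQ number. A kernel-style sketch of the convention (example_irq_handler is a made-up name, and this fragment only builds inside a kernel module):

    #include <linux/interrupt.h>

    static irqreturn_t example_irq_handler(int cpl, void *dev_id)
    {
            /* dev_id is the cookie passed to request_irq(); unused here. */
            (void)dev_id;

            /* "cpl" is the IRQ number, so it can be handed back to the IRQ
             * core, as octeon-wdt-main.c does when it masks its own line. */
            disable_irq_nosync(cpl);

            return IRQ_HANDLED;
    }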
/drivers/misc/
  apds990x.c
    251   u32 cpl;  in apds990x_lux_to_threshold() [local]
    277   cpl = ((u32)chip->atime * (u32)again[chip->again_next] *  in apds990x_lux_to_threshold()
    280   thres = lux * cpl / 64;  in apds990x_lux_to_threshold()
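In apds990x.c, cpl stands for "counts per lux": a scale factor built from the light sensor's integration time and analog gain, used to turn a lux threshold into a raw-count threshold. The /64 appears in the snippet, but the rest of line 277 is truncated above, so the sketch below only shows the shape of the conversion with illustrative scaling:

    #include <stdint.h>

    /* Illustrative conversion; the real scaling constants live in apds990x.c. */
    static uint32_t lux_to_threshold(uint32_t lux, uint32_t atime, uint32_t again)
    {
            uint32_t cpl = atime * again;  /* counts per lux, before device scaling */

            return lux * cpl / 64;
    }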
/drivers/scsi/csiostor/
  csio_scsi.c
    1106  struct cpl_fw6_msg *cpl;  in csio_scsi_cmpl_handler() [local]
    1112  cpl = (struct cpl_fw6_msg *)((uintptr_t)wr + sizeof(__be64));  in csio_scsi_cmpl_handler()
    1114  if (unlikely(cpl->opcode != CPL_FW6_MSG)) {  in csio_scsi_cmpl_handler()
    1116  cpl->opcode);  in csio_scsi_cmpl_handler()
    1121  tempwr = (uint8_t *)(cpl->data);  in csio_scsi_cmpl_handler()
/drivers/pcmcia/
  pcmcia_resource.c
    716   static irqreturn_t test_action(int cpl, void *dev_id)  in test_action() [argument]
/drivers/video/fbdev/
  uvesafb.c
    248   struct completion *cpl = task->done;  in uvesafb_reset() [local]
    251   task->done = cpl;  in uvesafb_reset()
/drivers/net/ethernet/cavium/octeon/
  octeon_mgmt.c
    669   static irqreturn_t octeon_mgmt_interrupt(int cpl, void *dev_id)  in octeon_mgmt_interrupt() [argument]