/drivers/i3c/master/mipi-i3c-hci/
cmd_v2.c (all in hci_cmd_v2_prep_private_xfer()):
      93  struct hci_xfer *xfer,  (argument)
      97  u8 *data = xfer->data;
      98  unsigned int data_len = xfer->data_len;
      99  bool rnw = xfer->rnw;
     101  xfer->cmd_tid = hci_get_tid();
     104  xfer->cmd_desc[0] =
     106  CMD_U0_TID(xfer->cmd_tid) |
     111  xfer->cmd_desc[1] =
     113  xfer->cmd_desc[2] = 0;
     114  xfer->cmd_desc[3] = 0;
    [all …]

cmd_v1.c:
     148  static void fill_data_bytes(struct hci_xfer *xfer, u8 *data,  in fill_data_bytes() argument
     151  xfer->cmd_desc[1] = 0;  in fill_data_bytes()
     154  xfer->cmd_desc[1] |= CMD_I1_DATA_BYTE_4(data[3]);  in fill_data_bytes()
     157  xfer->cmd_desc[1] |= CMD_I1_DATA_BYTE_3(data[2]);  in fill_data_bytes()
     160  xfer->cmd_desc[1] |= CMD_I1_DATA_BYTE_2(data[1]);  in fill_data_bytes()
     163  xfer->cmd_desc[1] |= CMD_I1_DATA_BYTE_1(data[0]);  in fill_data_bytes()
     169  xfer->data = NULL;  in fill_data_bytes()
     173  struct hci_xfer *xfer,  in hci_cmd_v1_prep_ccc() argument
     178  u8 *data = xfer->data;  in hci_cmd_v1_prep_ccc()
     179  unsigned int data_len = xfer->data_len;  in hci_cmd_v1_prep_ccc()
    [all …]

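The fall-through switch in fill_data_bytes() packs up to four immediate data
bytes into the second descriptor word. A compilable sketch of the pattern;
the DATA_BYTE_n lane macros here are stand-ins, not the driver's real
CMD_I1_DATA_BYTE_n() bit layout:

    #include <stdint.h>

    #define DATA_BYTE_1(x) ((uint32_t)(x) << 0)    /* hypothetical lanes */
    #define DATA_BYTE_2(x) ((uint32_t)(x) << 8)
    #define DATA_BYTE_3(x) ((uint32_t)(x) << 16)
    #define DATA_BYTE_4(x) ((uint32_t)(x) << 24)

    /* Pack up to 4 immediate bytes into one descriptor word, falling
     * through from the highest populated lane down to byte 1. */
    static uint32_t pack_data_bytes(const uint8_t *data, unsigned int len)
    {
            uint32_t word = 0;

            switch (len) {
            case 4: word |= DATA_BYTE_4(data[3]); /* fall through */
            case 3: word |= DATA_BYTE_3(data[2]); /* fall through */
            case 2: word |= DATA_BYTE_2(data[1]); /* fall through */
            case 1: word |= DATA_BYTE_1(data[0]);
            }
            return word;
    }
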
pio.c:
     227  static void hci_pio_write_cmd(struct i3c_hci *hci, struct hci_xfer *xfer)  in hci_pio_write_cmd() argument
     229  DBG("cmd_desc[%d] = 0x%08x", 0, xfer->cmd_desc[0]);  in hci_pio_write_cmd()
     230  DBG("cmd_desc[%d] = 0x%08x", 1, xfer->cmd_desc[1]);  in hci_pio_write_cmd()
     231  pio_reg_write(COMMAND_QUEUE_PORT, xfer->cmd_desc[0]);  in hci_pio_write_cmd()
     232  pio_reg_write(COMMAND_QUEUE_PORT, xfer->cmd_desc[1]);  in hci_pio_write_cmd()
     234  DBG("cmd_desc[%d] = 0x%08x", 2, xfer->cmd_desc[2]);  in hci_pio_write_cmd()
     235  DBG("cmd_desc[%d] = 0x%08x", 3, xfer->cmd_desc[3]);  in hci_pio_write_cmd()
     236  pio_reg_write(COMMAND_QUEUE_PORT, xfer->cmd_desc[2]);  in hci_pio_write_cmd()
     237  pio_reg_write(COMMAND_QUEUE_PORT, xfer->cmd_desc[3]);  in hci_pio_write_cmd()
     243  struct hci_xfer *xfer = pio->curr_rx;  in hci_pio_do_rx() local
    [all …]

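hci_pio_write_cmd() streams the descriptor words, in order, into a single
command-queue FIFO port. A hedged MMIO sketch of that shape; the base
pointer, word count, and register offset value are assumptions, and the
driver's pio_reg_write() presumably wraps a write like this:

    #include <linux/io.h>

    #define COMMAND_QUEUE_PORT 0x08    /* hypothetical offset */

    /* push each descriptor word into the same FIFO register */
    static void pio_write_cmd_sketch(void __iomem *base, const u32 *cmd_desc,
                                     unsigned int nwords)
    {
            unsigned int i;

            for (i = 0; i < nwords; i++)
                    writel(cmd_desc[i], base + COMMAND_QUEUE_PORT);
    }
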
core.c (all in i3c_hci_send_ccc_cmd()):
     195  struct hci_xfer *xfer;  (local)
     205  xfer = hci_alloc_xfer(nxfers);
     206  if (!xfer)
     210  xfer->data = NULL;
     211  xfer->data_len = 0;
     212  xfer->rnw = false;
     213  hci->cmd->prep_ccc(hci, xfer, I3C_BROADCAST_ADDR,
     215  xfer++;
     219  xfer[i].data = ccc->dests[i].payload.data;
     220  xfer[i].data_len = ccc->dests[i].payload.len;
    [all …]

dma.c:
     130  void *xfer, *resp, *ibi_status, *ibi_data;  member
     182  if (rh->xfer)  in hci_dma_cleanup()
     185  rh->xfer, rh->xfer_dma);  in hci_dma_cleanup()
     254  rh->xfer = dma_alloc_coherent(&hci->master.dev, xfers_sz,  in hci_dma_init()
     262  if (!rh->xfer || !rh->resp || !rh->src_xfers)  in hci_dma_init()
     343  struct hci_xfer *xfer;  in hci_dma_unmap_xfer() local
     347  xfer = xfer_list + i;  in hci_dma_unmap_xfer()
     349  xfer->data_dma, xfer->data_len,  in hci_dma_unmap_xfer()
     350  xfer->rnw ? DMA_FROM_DEVICE : DMA_TO_DEVICE);  in hci_dma_unmap_xfer()
     369  struct hci_xfer *xfer = xfer_list + i;  in hci_dma_queue_xfer() local
    [all …]

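hci_dma_unmap_xfer() picks the unmap direction from the transfer's rnw
("read, not write") flag: a read mapped the buffer for the device to write
into it (DMA_FROM_DEVICE), a write mapped it for the device to read
(DMA_TO_DEVICE). A minimal sketch, with a reduced xfer struct assumed from
the fields visible in the listing:

    #include <linux/dma-mapping.h>

    struct xfer_sketch {              /* reduced; field names from the listing */
            dma_addr_t data_dma;
            unsigned int data_len;
            bool rnw;
    };

    static void unmap_xfer_sketch(struct device *dev,
                                  struct xfer_sketch *xfer_list, unsigned int n)
    {
            unsigned int i;

            for (i = 0; i < n; i++) {
                    struct xfer_sketch *xfer = &xfer_list[i];

                    /* rnw decides which way the data moved on the bus */
                    dma_unmap_single(dev, xfer->data_dma, xfer->data_len,
                                     xfer->rnw ? DMA_FROM_DEVICE
                                               : DMA_TO_DEVICE);
            }
    }
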
/drivers/firmware/arm_scmi/
driver.c:
     262  struct scmi_xfer *xfer)  in scmi_xfer_token_set() argument
     274  next_token = (xfer->transfer_id & (MSG_TOKEN_MAX - 1));  in scmi_xfer_token_set()
     302  xfer->hdr.seq = (u16)xfer_id;  in scmi_xfer_token_set()
     314  struct scmi_xfer *xfer)  in scmi_xfer_token_clear() argument
     316  clear_bit(xfer->hdr.seq, minfo->xfer_alloc_table);  in scmi_xfer_token_clear()
     347  struct scmi_xfer *xfer;  in scmi_xfer_get() local
     356  xfer = hlist_entry(minfo->free_xfers.first, struct scmi_xfer, node);  in scmi_xfer_get()
     357  hlist_del_init(&xfer->node);  in scmi_xfer_get()
     363  xfer->transfer_id = atomic_inc_return(&transfer_last_id);  in scmi_xfer_get()
     367  ret = scmi_xfer_token_set(minfo, xfer);  in scmi_xfer_get()
    [all …]

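scmi_xfer_token_set() hands out sequence tokens from a bitmap, searching
forward from the last transfer_id so recently freed tokens are not
immediately reused, while scmi_xfer_token_clear() releases them with
clear_bit(). A sketch of that allocation, under the assumption of a 10-bit
token space:

    #include <linux/bitmap.h>
    #include <linux/errno.h>

    #define MSG_TOKEN_MAX 1024   /* assumption: 10-bit sequence numbers */

    static int token_set_sketch(unsigned long *alloc_table, u32 transfer_id)
    {
            unsigned long next = transfer_id & (MSG_TOKEN_MAX - 1);
            unsigned long id;

            /* search forward from the last-used position, wrap once */
            id = find_next_zero_bit(alloc_table, MSG_TOKEN_MAX, next);
            if (id == MSG_TOKEN_MAX)
                    id = find_first_zero_bit(alloc_table, MSG_TOKEN_MAX);
            if (id == MSG_TOKEN_MAX)
                    return -ENOMEM;    /* every token currently in flight */

            set_bit(id, alloc_table);
            return id;
    }
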
msg.c:
      33  size_t msg_command_size(struct scmi_xfer *xfer)  in msg_command_size() argument
      35  return sizeof(struct scmi_msg_payld) + xfer->tx.len;  in msg_command_size()
      45  size_t msg_response_size(struct scmi_xfer *xfer)  in msg_response_size() argument
      47  return sizeof(struct scmi_msg_payld) + sizeof(__le32) + xfer->rx.len;  in msg_response_size()
      56  void msg_tx_prepare(struct scmi_msg_payld *msg, struct scmi_xfer *xfer)  in msg_tx_prepare() argument
      58  msg->msg_header = cpu_to_le32(pack_scmi_header(&xfer->hdr));  in msg_tx_prepare()
      59  if (xfer->tx.buf)  in msg_tx_prepare()
      60  memcpy(msg->msg_payload, xfer->tx.buf, xfer->tx.len);  in msg_tx_prepare()
      83  struct scmi_xfer *xfer)  in msg_fetch_response() argument
      87  xfer->hdr.status = le32_to_cpu(msg->msg_payload[0]);  in msg_fetch_response()
    [all …]

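msg.c computes wire sizes as a 32-bit packed header plus payload, with
responses carrying an extra 32-bit status word before the data. A sketch
that mirrors the arithmetic in the listing; the struct here is a stand-in
for scmi_msg_payld:

    #include <linux/types.h>

    struct msg_sketch {
            __le32 msg_header;       /* packed SCMI header */
            u8 msg_payload[];        /* command or response payload */
    };

    static size_t command_size_sketch(size_t tx_len)
    {
            return sizeof(struct msg_sketch) + tx_len;    /* header + data */
    }

    static size_t response_size_sketch(size_t rx_len)
    {
            /* responses prepend a 32-bit status word to the data */
            return sizeof(struct msg_sketch) + sizeof(__le32) + rx_len;
    }
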
shmem.c:
      33  struct scmi_xfer *xfer)  in shmem_tx_prepare() argument
      45  iowrite32(xfer->hdr.poll_completion ? 0 : SCMI_SHMEM_FLAG_INTR_ENABLED,  in shmem_tx_prepare()
      47  iowrite32(sizeof(shmem->msg_header) + xfer->tx.len, &shmem->length);  in shmem_tx_prepare()
      48  iowrite32(pack_scmi_header(&xfer->hdr), &shmem->msg_header);  in shmem_tx_prepare()
      49  if (xfer->tx.buf)  in shmem_tx_prepare()
      50  memcpy_toio(shmem->msg_payload, xfer->tx.buf, xfer->tx.len);  in shmem_tx_prepare()
      59  struct scmi_xfer *xfer)  in shmem_fetch_response() argument
      63  xfer->hdr.status = ioread32(shmem->msg_payload);  in shmem_fetch_response()
      65  xfer->rx.len = min_t(size_t, xfer->rx.len, len > 8 ? len - 8 : 0);  in shmem_fetch_response()
      68  memcpy_fromio(xfer->rx.buf, shmem->msg_payload + 4, xfer->rx.len);  in shmem_fetch_response()
    [all …]

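shmem_tx_prepare() writes the channel flags, total length, and packed header
with iowrite32(), then copies the payload with memcpy_toio(), since the
shared-memory window is iomem rather than ordinary RAM. A sketch under an
assumed layout; the real scmi_shared_mem struct has more fields than shown,
and the flag value is a guess:

    #include <linux/bits.h>
    #include <linux/io.h>
    #include <linux/types.h>

    #define SHMEM_FLAG_INTR_ENABLED BIT(0)    /* assumption */

    struct shmem_sketch {                     /* reduced layout, an assumption */
            __le32 flags;
            __le32 length;
            __le32 msg_header;
            u8 msg_payload[];
    };

    static void tx_prepare_sketch(struct shmem_sketch __iomem *shmem,
                                  u32 header, const void *buf, size_t len,
                                  bool poll)
    {
            /* polling callers suppress the completion interrupt */
            iowrite32(poll ? 0 : SHMEM_FLAG_INTR_ENABLED, &shmem->flags);
            iowrite32(sizeof(shmem->msg_header) + len, &shmem->length);
            iowrite32(header, &shmem->msg_header);
            if (buf)
                    memcpy_toio(shmem->msg_payload, buf, len);
    }
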
/drivers/firmware/
ti_sci.c:
     236  struct ti_sci_xfer *xfer;  in ti_sci_rx_callback() local
     250  xfer = &minfo->xfer_block[xfer_id];  in ti_sci_rx_callback()
     259  if (mbox_msg->len < xfer->rx_len) {  in ti_sci_rx_callback()
     261  mbox_msg->len, xfer->rx_len);  in ti_sci_rx_callback()
     268  memcpy(xfer->xfer_buf, mbox_msg->buf, xfer->rx_len);  in ti_sci_rx_callback()
     269  complete(&xfer->done);  in ti_sci_rx_callback()
     295  struct ti_sci_xfer *xfer;  in ti_sci_get_one_xfer() local
     335  xfer = &minfo->xfer_block[xfer_id];  in ti_sci_get_one_xfer()
     337  hdr = (struct ti_sci_msg_hdr *)xfer->tx_message.buf;  in ti_sci_get_one_xfer()
     338  xfer->tx_message.len = tx_message_size;  in ti_sci_get_one_xfer()
    [all …]

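ti_sci_rx_callback() routes a mailbox response back to the waiting transfer
and wakes the sender through its completion. A sketch of that hand-off; the
reduced struct is assumed from the fields in the listing, and where the
listing suggests the driver warns about a short response, the sketch simply
rejects it:

    #include <linux/completion.h>
    #include <linux/string.h>

    struct xfer_sketch {               /* reduced; fields from the listing */
            u8 *xfer_buf;
            size_t rx_len;
            struct completion done;
    };

    static void rx_callback_sketch(struct xfer_sketch *xfer,
                                   const u8 *buf, size_t len)
    {
            /* a response shorter than expected must not be copied as-is */
            if (len < xfer->rx_len)
                    return;

            memcpy(xfer->xfer_buf, buf, xfer->rx_len);
            complete(&xfer->done);     /* unblocks the sender's wait */
    }
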
/drivers/spi/
spi-atmel.c:
     411  static inline bool atmel_spi_is_vmalloc_xfer(struct spi_transfer *xfer)  in atmel_spi_is_vmalloc_xfer() argument
     413  return is_vmalloc_addr(xfer->tx_buf) || is_vmalloc_addr(xfer->rx_buf);  in atmel_spi_is_vmalloc_xfer()
     417  struct spi_transfer *xfer)  in atmel_spi_use_dma() argument
     419  return as->use_dma && xfer->len >= DMA_MIN_BYTES;  in atmel_spi_use_dma()
     424  struct spi_transfer *xfer)  in atmel_spi_can_dma() argument
     429  return atmel_spi_use_dma(as, xfer) &&  in atmel_spi_can_dma()
     430  !atmel_spi_is_vmalloc_xfer(xfer);  in atmel_spi_can_dma()
     432  return atmel_spi_use_dma(as, xfer);  in atmel_spi_can_dma()
     579  struct spi_transfer *xfer)  in atmel_spi_next_xfer_single() argument
     582  unsigned long xfer_pos = xfer->len - as->current_remaining_bytes;  in atmel_spi_next_xfer_single()
    [all …]

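atmel_spi_can_dma() gates DMA on two conditions visible in the listing: the
transfer must be long enough to amortize DMA setup cost, and on parts where
the check applies the buffers must not live in vmalloc space, whose pages
are not physically contiguous. A condensed sketch; DMA_MIN_BYTES is given a
hypothetical value:

    #include <linux/mm.h>
    #include <linux/spi/spi.h>

    #define DMA_MIN_BYTES 16    /* hypothetical threshold */

    static bool can_dma_sketch(const struct spi_transfer *xfer,
                               bool ctlr_has_dma, bool reject_vmalloc)
    {
            if (!ctlr_has_dma || xfer->len < DMA_MIN_BYTES)
                    return false;    /* PIO is cheaper for short transfers */

            if (reject_vmalloc &&
                (is_vmalloc_addr(xfer->tx_buf) ||
                 is_vmalloc_addr(xfer->rx_buf)))
                    return false;

            return true;
    }
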
spi-qcom-qspi.c:
     143  struct qspi_xfer xfer;  member
     170  const struct qspi_xfer *xfer;  in qcom_qspi_pio_xfer_cfg() local
     172  xfer = &ctrl->xfer;  in qcom_qspi_pio_xfer_cfg()
     175  pio_xfer_cfg |= xfer->dir;  in qcom_qspi_pio_xfer_cfg()
     176  if (xfer->is_last)  in qcom_qspi_pio_xfer_cfg()
     181  pio_xfer_cfg |= qspi_buswidth_to_iomode(ctrl, xfer->buswidth);  in qcom_qspi_pio_xfer_cfg()
     192  pio_xfer_ctrl |= ctrl->xfer.rem_bytes;  in qcom_qspi_pio_xfer_ctrl()
     206  if (ctrl->xfer.dir == QSPI_WRITE)  in qcom_qspi_pio_xfer()
     224  ctrl->xfer.rem_bytes = 0;  in qcom_qspi_handle_err()
     262  struct spi_transfer *xfer)  in qcom_qspi_transfer_one() argument
    [all …]

spi-slave-mt27xx.c:
     164  struct spi_transfer *xfer)  in mtk_spi_slave_fifo_transfer() argument
     172  if (xfer->rx_buf)  in mtk_spi_slave_fifo_transfer()
     174  if (xfer->tx_buf)  in mtk_spi_slave_fifo_transfer()
     178  cnt = xfer->len / 4;  in mtk_spi_slave_fifo_transfer()
     179  if (xfer->tx_buf)  in mtk_spi_slave_fifo_transfer()
     181  xfer->tx_buf, cnt);  in mtk_spi_slave_fifo_transfer()
     183  remainder = xfer->len % 4;  in mtk_spi_slave_fifo_transfer()
     184  if (xfer->tx_buf && remainder > 0) {  in mtk_spi_slave_fifo_transfer()
     186  memcpy(&reg_val, xfer->tx_buf + cnt * 4, remainder);  in mtk_spi_slave_fifo_transfer()
     201  struct spi_transfer *xfer)  in mtk_spi_slave_dma_transfer() argument
    [all …]

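mtk_spi_slave_fifo_transfer() writes the buffer as whole 32-bit words first,
then packs the remaining 1 to 3 bytes into one last word. A compilable
sketch of the len/4 plus len%4 split; the FIFO is modeled as a single
volatile register:

    #include <stdint.h>
    #include <string.h>

    /* copy whole words first, then the packed remainder */
    static void fifo_fill_sketch(volatile uint32_t *fifo,
                                 const uint8_t *buf, size_t len)
    {
            size_t cnt = len / 4;       /* full 32-bit words */
            size_t rem = len % 4;       /* trailing bytes, if any */
            uint32_t reg_val;
            size_t i;

            for (i = 0; i < cnt; i++) {
                    memcpy(&reg_val, buf + i * 4, 4);
                    *fifo = reg_val;
            }
            if (rem) {
                    reg_val = 0;
                    memcpy(&reg_val, buf + cnt * 4, rem);
                    *fifo = reg_val;
            }
    }
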
spi-axi-spi-engine.c:
     131  struct spi_device *spi, struct spi_transfer *xfer)  in spi_engine_get_clk_div() argument
     136  xfer->speed_hz * 2);  in spi_engine_get_clk_div()
     146  struct spi_transfer *xfer)  in spi_engine_gen_xfer() argument
     148  unsigned int len = xfer->len;  in spi_engine_gen_xfer()
     154  if (xfer->tx_buf)  in spi_engine_gen_xfer()
     156  if (xfer->rx_buf)  in spi_engine_gen_xfer()
     167  struct spi_transfer *xfer)  in spi_engine_gen_sleep() argument
     173  delay = spi_delay_to_ns(&xfer->delay, xfer);  in spi_engine_gen_sleep()
     205  struct spi_transfer *xfer;  in spi_engine_compile_message() local
     215  list_for_each_entry(xfer, &msg->transfers, transfer_list) {  in spi_engine_compile_message()
    [all …]

spi.c:
     289  struct spi_transfer *xfer,  in spi_statistics_add_transfer_stats() argument
     293  int l2len = min(fls(xfer->len), SPI_STATISTICS_HISTO_SIZE) - 1;  in spi_statistics_add_transfer_stats()
     303  stats->bytes += xfer->len;  in spi_statistics_add_transfer_stats()
     304  if ((xfer->tx_buf) &&  in spi_statistics_add_transfer_stats()
     305  (xfer->tx_buf != ctlr->dummy_tx))  in spi_statistics_add_transfer_stats()
     306  stats->bytes_tx += xfer->len;  in spi_statistics_add_transfer_stats()
     307  if ((xfer->rx_buf) &&  in spi_statistics_add_transfer_stats()
     308  (xfer->rx_buf != ctlr->dummy_rx))  in spi_statistics_add_transfer_stats()
     309  stats->bytes_rx += xfer->len;  in spi_statistics_add_transfer_stats()
    1018  struct spi_transfer *xfer;  in __spi_map_msg() local
    [all …]

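The statistics code buckets transfer sizes into a power-of-two histogram:
fls() gives floor(log2(len)) + 1 for nonzero len, which is clamped to the
table size and shifted to a zero-based index. A compilable sketch; the
histogram size here is an assumption:

    #include <stdint.h>

    #define HISTO_SIZE 17    /* assumption */

    /* bucket = floor(log2(len)), clamped; len == 0 lands in bucket 0 */
    static int histo_bucket_sketch(uint32_t len)
    {
            /* (len | 1) keeps clz well-defined and maps 0 to bucket 0 */
            int fls_len = 32 - __builtin_clz(len | 1);

            if (fls_len > HISTO_SIZE)
                    fls_len = HISTO_SIZE;
            return fls_len - 1;
    }
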
/drivers/iio/imu/
adis_buffer.c (all in adis_update_scan_mode_burst()):
      36  adis->xfer = kcalloc(2, sizeof(*adis->xfer), GFP_KERNEL);
      37  if (!adis->xfer)
      42  kfree(adis->xfer);
      43  adis->xfer = NULL;
      51  adis->xfer[0].tx_buf = tx;
      52  adis->xfer[0].bits_per_word = 8;
      53  adis->xfer[0].len = 2;
      55  adis->xfer[0].speed_hz = adis->data->burst_max_speed_hz;
      56  adis->xfer[1].rx_buf = adis->buffer;
      57  adis->xfer[1].bits_per_word = 8;
    [all …]

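The burst setup allocates exactly two transfers: a 2-byte TX command phase,
then an RX phase into the scan buffer, chained into one SPI message. A
sketch of that shape with caller-supplied buffers assumed:

    #include <linux/spi/spi.h>
    #include <linux/string.h>

    /* one message, two phases: command out, burst data in */
    static void burst_setup_sketch(struct spi_message *msg,
                                   struct spi_transfer xfer[2],
                                   const u8 *tx_cmd, void *rx_buf,
                                   size_t rx_len)
    {
            memset(xfer, 0, 2 * sizeof(*xfer));

            xfer[0].tx_buf = tx_cmd;
            xfer[0].bits_per_word = 8;
            xfer[0].len = 2;

            xfer[1].rx_buf = rx_buf;
            xfer[1].bits_per_word = 8;
            xfer[1].len = rx_len;

            spi_message_init(msg);
            spi_message_add_tail(&xfer[0], msg);
            spi_message_add_tail(&xfer[1], msg);
    }
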
/drivers/dma/dw-edma/
dw-edma-core.c (all in dw_edma_device_transfer()):
     333  dw_edma_device_transfer(struct dw_edma_transfer *xfer)  (argument)
     335  struct dw_edma_chan *chan = dchan2dw_edma_chan(xfer->dchan);
     336  enum dma_transfer_direction dir = xfer->direction;
     365  if (xfer->type == EDMA_XFER_CYCLIC) {
     366  if (!xfer->xfer.cyclic.len || !xfer->xfer.cyclic.cnt)
     368  } else if (xfer->type == EDMA_XFER_SCATTER_GATHER) {
     369  if (xfer->xfer.sg.len < 1)
     371  } else if (xfer->type == EDMA_XFER_INTERLEAVED) {
     372  if (!xfer->xfer.il->numf)
     374  if (xfer->xfer.il->numf > 0 && xfer->xfer.il->frame_size > 0)
    [all …]

/drivers/gpu/drm/via/
via_dmablit.c:
      99  const drm_via_dmablit_t *xfer,  in via_map_blit_for_device() argument
     105  unsigned char *mem_addr = xfer->mem_addr;  in via_map_blit_for_device()
     108  uint32_t fb_addr = xfer->fb_addr;  in via_map_blit_for_device()
     120  for (cur_line = 0; cur_line < xfer->num_lines; ++cur_line) {  in via_map_blit_for_device()
     122  line_len = xfer->line_length;  in via_map_blit_for_device()
     156  mem_addr += xfer->mem_stride;  in via_map_blit_for_device()
     157  fb_addr += xfer->fb_stride;  in via_map_blit_for_device()
     232  via_lock_all_dma_pages(drm_via_sg_info_t *vsg, drm_via_dmablit_t *xfer)  in via_lock_all_dma_pages() argument
     235  unsigned long first_pfn = VIA_PFN(xfer->mem_addr);  in via_lock_all_dma_pages()
     236  vsg->num_pages = VIA_PFN(xfer->mem_addr + (xfer->num_lines * xfer->mem_stride - 1)) -  in via_lock_all_dma_pages()
    [all …]

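via_lock_all_dma_pages() sizes its page array from the span the blit
touches: the PFN of the first byte through the PFN of the very last byte
(num_lines * mem_stride - 1), inclusive, since the buffer may straddle page
boundaries at both ends. A compilable sketch assuming 4 KiB pages:

    #include <stddef.h>
    #include <stdint.h>

    #define PAGE_SHIFT_SKETCH 12    /* assumption: 4 KiB pages */
    #define PFN(addr) ((uintptr_t)(addr) >> PAGE_SHIFT_SKETCH)

    /* pages spanned by num_lines lines of stride mem_stride, inclusive */
    static unsigned long blit_num_pages(const unsigned char *mem_addr,
                                        unsigned int num_lines,
                                        unsigned int mem_stride)
    {
            unsigned long first_pfn = PFN(mem_addr);
            unsigned long last_pfn =
                    PFN(mem_addr + ((size_t)num_lines * mem_stride - 1));

            return last_pfn - first_pfn + 1;
    }
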
/drivers/staging/greybus/
spilib.c:
     151  static struct spi_transfer *get_next_xfer(struct spi_transfer *xfer,  in get_next_xfer() argument
     154  if (xfer == list_last_entry(&msg->transfers, struct spi_transfer,  in get_next_xfer()
     158  return list_next_entry(xfer, transfer_list);  in get_next_xfer()
     167  struct spi_transfer *xfer;  in gb_spi_operation_create() local
     178  xfer = spi->first_xfer;  in gb_spi_operation_create()
     184  spi->last_xfer = xfer;  in gb_spi_operation_create()
     186  if (!xfer->tx_buf && !xfer->rx_buf) {  in gb_spi_operation_create()
     188  "bufferless transfer, length %u\n", xfer->len);  in gb_spi_operation_create()
     196  if (xfer->tx_buf) {  in gb_spi_operation_create()
     197  len = xfer->len - spi->tx_xfer_offset;  in gb_spi_operation_create()
    [all …]

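get_next_xfer() is a guarded cursor over msg->transfers: it returns NULL at
the tail instead of walking off the list, because list_next_entry() on the
last node would yield the list head rather than a real transfer. Filling in
the elided lines, the helper plausibly reads:

    #include <linux/list.h>
    #include <linux/spi/spi.h>

    static struct spi_transfer *
    get_next_xfer_sketch(struct spi_transfer *xfer, struct spi_message *msg)
    {
            /* stop explicitly at the last transfer in the message */
            if (xfer == list_last_entry(&msg->transfers, struct spi_transfer,
                                        transfer_list))
                    return NULL;

            return list_next_entry(xfer, transfer_list);
    }
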
/drivers/i3c/master/
dw-i3c-master.c:
     360  struct dw_i3c_xfer *xfer;  in dw_i3c_master_alloc_xfer() local
     362  xfer = kzalloc(struct_size(xfer, cmds, ncmds), GFP_KERNEL);  in dw_i3c_master_alloc_xfer()
     363  if (!xfer)  in dw_i3c_master_alloc_xfer()
     366  INIT_LIST_HEAD(&xfer->node);  in dw_i3c_master_alloc_xfer()
     367  xfer->ncmds = ncmds;  in dw_i3c_master_alloc_xfer()
     368  xfer->ret = -ETIMEDOUT;  in dw_i3c_master_alloc_xfer()
     370  return xfer;  in dw_i3c_master_alloc_xfer()
     373  static void dw_i3c_master_free_xfer(struct dw_i3c_xfer *xfer)  in dw_i3c_master_free_xfer() argument
     375  kfree(xfer);  in dw_i3c_master_free_xfer()
     380  struct dw_i3c_xfer *xfer = master->xferqueue.cur;  in dw_i3c_master_start_xfer_locked() local
    [all …]

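Both this driver and svc-i3c-master below allocate their xfer with
kzalloc(struct_size(...)): struct_size() computes the size of a struct with
a trailing flexible array while saturating instead of wrapping on overflow.
A sketch of that idiom with a hypothetical command element type:

    #include <linux/errno.h>
    #include <linux/list.h>
    #include <linux/overflow.h>
    #include <linux/slab.h>

    struct cmd_sketch {                  /* hypothetical element type */
            u32 cmd_lo;
            u32 cmd_hi;
    };

    struct xfer_sketch {
            struct list_head node;
            int ret;
            unsigned int ncmds;
            struct cmd_sketch cmds[];    /* sized at allocation time */
    };

    static struct xfer_sketch *alloc_xfer_sketch(unsigned int ncmds)
    {
            struct xfer_sketch *xfer;

            /* struct_size() only uses the type of xfer, so using the
             * still-unassigned pointer here is the standard idiom */
            xfer = kzalloc(struct_size(xfer, cmds, ncmds), GFP_KERNEL);
            if (!xfer)
                    return NULL;

            INIT_LIST_HEAD(&xfer->node);
            xfer->ncmds = ncmds;
            xfer->ret = -ETIMEDOUT;      /* pessimistic until completion */
            return xfer;
    }
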
svc-i3c-master.c:
    1002  struct svc_i3c_xfer *xfer;  in svc_i3c_master_alloc_xfer() local
    1004  xfer = kzalloc(struct_size(xfer, cmds, ncmds), GFP_KERNEL);  in svc_i3c_master_alloc_xfer()
    1005  if (!xfer)  in svc_i3c_master_alloc_xfer()
    1008  INIT_LIST_HEAD(&xfer->node);  in svc_i3c_master_alloc_xfer()
    1009  xfer->ncmds = ncmds;  in svc_i3c_master_alloc_xfer()
    1010  xfer->ret = -ETIMEDOUT;  in svc_i3c_master_alloc_xfer()
    1012  return xfer;  in svc_i3c_master_alloc_xfer()
    1015  static void svc_i3c_master_free_xfer(struct svc_i3c_xfer *xfer)  in svc_i3c_master_free_xfer() argument
    1017  kfree(xfer);  in svc_i3c_master_free_xfer()
    1021  struct svc_i3c_xfer *xfer)  in svc_i3c_master_dequeue_xfer_locked() argument
    [all …]

/drivers/net/ethernet/micrel/
ks8851_spi.c:
     120  struct spi_transfer *xfer = &kss->spi_xfer1;  in ks8851_wrreg16_spi() local
     128  xfer->tx_buf = txb;  in ks8851_wrreg16_spi()
     129  xfer->rx_buf = NULL;  in ks8851_wrreg16_spi()
     130  xfer->len = 4;  in ks8851_wrreg16_spi()
     151  struct spi_transfer *xfer;  in ks8851_rdreg() local
     161  xfer = kss->spi_xfer2;  in ks8851_rdreg()
     163  xfer->tx_buf = txb;  in ks8851_rdreg()
     164  xfer->rx_buf = NULL;  in ks8851_rdreg()
     165  xfer->len = 2;  in ks8851_rdreg()
     167  xfer++;  in ks8851_rdreg()
    [all …]

/drivers/gpu/drm/bridge/
nwl-dsi.c:
     113  struct nwl_dsi_transfer *xfer;  member
     373  struct nwl_dsi_transfer *xfer = dsi->xfer;  in nwl_dsi_read_packet() local
     375  u8 *payload = xfer->msg->rx_buf;  in nwl_dsi_read_packet()
     381  xfer->status = 0;  in nwl_dsi_read_packet()
     383  if (xfer->rx_word_count == 0) {  in nwl_dsi_read_packet()
     390  xfer->status = err;  in nwl_dsi_read_packet()
     395  if (channel != xfer->msg->channel) {  in nwl_dsi_read_packet()
     398  xfer->cmd, channel, xfer->msg->channel);  in nwl_dsi_read_packet()
     399  xfer->status = -EINVAL;  in nwl_dsi_read_packet()
     406  if (xfer->msg->rx_len > 1) {  in nwl_dsi_read_packet()
    [all …]

/drivers/net/can/spi/mcp251xfd/
mcp251xfd-regmap.c:
      31  struct spi_transfer xfer[] = {  in mcp251xfd_regmap_nocrc_gather_write() local
      47  return spi_sync_transfer(spi, xfer, ARRAY_SIZE(xfer));  in mcp251xfd_regmap_nocrc_gather_write()
      96  struct spi_transfer xfer[2] = { };  in mcp251xfd_regmap_nocrc_update_bits() local
     100  spi_message_add_tail(&xfer[0], &msg);  in mcp251xfd_regmap_nocrc_update_bits()
     103  xfer[0].tx_buf = buf_tx;  in mcp251xfd_regmap_nocrc_update_bits()
     104  xfer[0].len = sizeof(buf_tx->cmd);  in mcp251xfd_regmap_nocrc_update_bits()
     106  xfer[1].rx_buf = buf_rx->data;  in mcp251xfd_regmap_nocrc_update_bits()
     107  xfer[1].len = len;  in mcp251xfd_regmap_nocrc_update_bits()
     108  spi_message_add_tail(&xfer[1], &msg);  in mcp251xfd_regmap_nocrc_update_bits()
     110  xfer[0].tx_buf = buf_tx;  in mcp251xfd_regmap_nocrc_update_bits()
    [all …]

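The gather write sends the command bytes and the register payload as two
back-to-back transfers in one bus transaction; spi_sync_transfer() builds a
temporary spi_message around the array internally. A sketch of the call
shape with assumed buffer parameters:

    #include <linux/kernel.h>
    #include <linux/spi/spi.h>

    /* two TX phases under one chip-select assertion */
    static int gather_write_sketch(struct spi_device *spi,
                                   const void *cmd, size_t cmd_len,
                                   const void *val, size_t val_len)
    {
            struct spi_transfer xfer[] = {
                    {
                            .tx_buf = cmd,
                            .len = cmd_len,
                    }, {
                            .tx_buf = val,
                            .len = val_len,
                    },
            };

            return spi_sync_transfer(spi, xfer, ARRAY_SIZE(xfer));
    }
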
/drivers/input/touchscreen/
ad7877.c:
     147  struct spi_transfer xfer[6];  member
     177  struct spi_transfer xfer[AD7877_NR_SENSE + 2];  member
     212  req->xfer[0].tx_buf = &req->command;  in ad7877_read()
     213  req->xfer[0].len = 2;  in ad7877_read()
     214  req->xfer[0].cs_change = 1;  in ad7877_read()
     216  req->xfer[1].rx_buf = &req->sample;  in ad7877_read()
     217  req->xfer[1].len = 2;  in ad7877_read()
     219  spi_message_add_tail(&req->xfer[0], &req->msg);  in ad7877_read()
     220  spi_message_add_tail(&req->xfer[1], &req->msg);  in ad7877_read()
     242  req->xfer[0].tx_buf = &req->command;  in ad7877_write()
    [all …]

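The read request pairs a command transfer with a sample transfer;
cs_change = 1 on the first one deasserts chip select between the two
halves, presumably so the device latches the command before the sample is
clocked out. A sketch with caller-owned, DMA-safe buffers assumed:

    #include <linux/spi/spi.h>
    #include <linux/string.h>

    /* command out, then sample in, with a CS toggle in between */
    static void read_setup_sketch(struct spi_message *msg,
                                  struct spi_transfer xfer[2],
                                  __be16 *command, __be16 *sample)
    {
            memset(xfer, 0, 2 * sizeof(*xfer));
            spi_message_init(msg);

            xfer[0].tx_buf = command;
            xfer[0].len = 2;
            xfer[0].cs_change = 1;       /* drop CS after the command word */
            spi_message_add_tail(&xfer[0], msg);

            xfer[1].rx_buf = sample;
            xfer[1].len = 2;
            spi_message_add_tail(&xfer[1], msg);
    }
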
/drivers/i2c/busses/
i2c-mlxcpld.c:
      73  struct mlxcpld_i2c_curr_xfer xfer;  member
     216  priv->xfer.msg = msgs;  in mlxcpld_i2c_set_transf_data()
     217  priv->xfer.msg_num = num;  in mlxcpld_i2c_set_transf_data()
     225  priv->xfer.cmd = msgs[num - 1].flags & I2C_M_RD;  in mlxcpld_i2c_set_transf_data()
     227  if (priv->xfer.cmd == I2C_M_RD && comm_len != msgs[0].len) {  in mlxcpld_i2c_set_transf_data()
     228  priv->xfer.addr_width = msgs[0].len;  in mlxcpld_i2c_set_transf_data()
     229  priv->xfer.data_len = comm_len - priv->xfer.addr_width;  in mlxcpld_i2c_set_transf_data()
     231  priv->xfer.addr_width = 0;  in mlxcpld_i2c_set_transf_data()
     232  priv->xfer.data_len = comm_len;  in mlxcpld_i2c_set_transf_data()
     302  if (priv->xfer.cmd != I2C_M_RD)  in mlxcpld_i2c_wait_for_tc()
    [all …]

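mlxcpld_i2c_set_transf_data() classifies the whole transaction by the last
message's I2C_M_RD flag: for a read, the first message carries only the
register address (addr_width) and the rest of the combined length is data.
A sketch with the bookkeeping struct assumed from the fields in the
listing:

    #include <linux/i2c.h>

    struct curr_xfer_sketch {            /* reduced; fields from the listing */
            struct i2c_msg *msg;
            int msg_num;
            u16 cmd;
            u8 addr_width;
            u8 data_len;
    };

    static void set_transf_data_sketch(struct curr_xfer_sketch *x,
                                       struct i2c_msg *msgs, int num,
                                       u8 comm_len)
    {
            x->msg = msgs;
            x->msg_num = num;

            /* the last message's direction flag names the transaction */
            x->cmd = msgs[num - 1].flags & I2C_M_RD;

            if (x->cmd == I2C_M_RD && comm_len != msgs[0].len) {
                    x->addr_width = msgs[0].len;             /* address phase */
                    x->data_len = comm_len - x->addr_width;  /* payload */
            } else {
                    x->addr_width = 0;
                    x->data_len = comm_len;
            }
    }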