
Searched refs:cmds (Results 1 – 25 of 75) sorted by relevance


/drivers/media/pci/saa7164/
saa7164-cmd.c
18 if (dev->cmds[i].inuse == 0) { in saa7164_cmd_alloc_seqno()
19 dev->cmds[i].inuse = 1; in saa7164_cmd_alloc_seqno()
20 dev->cmds[i].signalled = 0; in saa7164_cmd_alloc_seqno()
21 dev->cmds[i].timeout = 0; in saa7164_cmd_alloc_seqno()
22 ret = dev->cmds[i].seqno; in saa7164_cmd_alloc_seqno()
34 if ((dev->cmds[seqno].inuse == 1) && in saa7164_cmd_free_seqno()
35 (dev->cmds[seqno].seqno == seqno)) { in saa7164_cmd_free_seqno()
36 dev->cmds[seqno].inuse = 0; in saa7164_cmd_free_seqno()
37 dev->cmds[seqno].signalled = 0; in saa7164_cmd_free_seqno()
38 dev->cmds[seqno].timeout = 0; in saa7164_cmd_free_seqno()
[all …]
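
The saa7164 lines above show a slot-based sequence-number allocator: scan for a free entry, mark it busy, and return its seqno; freeing checks that the slot really is in use and that the stored seqno matches. A minimal userspace sketch of that pattern, with the driver's locking omitted and NUM_CMDS as an illustrative size, not the driver's:

#include <stdio.h>

#define NUM_CMDS 8

struct cmd_slot {
    int seqno;      /* index doubles as the sequence number */
    int inuse;
    int signalled;
    int timeout;
};

static struct cmd_slot cmds[NUM_CMDS];

static void cmds_init(void)
{
    for (int i = 0; i < NUM_CMDS; i++)
        cmds[i].seqno = i;
}

/* Return a free sequence number, or -1 if every slot is busy. */
static int cmd_alloc_seqno(void)
{
    for (int i = 0; i < NUM_CMDS; i++) {
        if (cmds[i].inuse == 0) {
            cmds[i].inuse = 1;
            cmds[i].signalled = 0;
            cmds[i].timeout = 0;
            return cmds[i].seqno;
        }
    }
    return -1;
}

/* Release a slot only if it is in use and the seqno matches. */
static void cmd_free_seqno(int seqno)
{
    if (seqno < 0 || seqno >= NUM_CMDS)
        return;
    if (cmds[seqno].inuse == 1 && cmds[seqno].seqno == seqno) {
        cmds[seqno].inuse = 0;
        cmds[seqno].signalled = 0;
        cmds[seqno].timeout = 0;
    }
}

int main(void)
{
    cmds_init();
    int s = cmd_alloc_seqno();
    printf("allocated seqno %d\n", s);
    cmd_free_seqno(s);
    return 0;
}
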
/drivers/infiniband/core/
roce_gid_mgmt.c
69 struct netdev_event_work_cmd cmds[ROCE_NETDEV_CALLBACK_SZ]; member
626 for (i = 0; i < ARRAY_SIZE(work->cmds) && work->cmds[i].cb; i++) { in netdevice_event_work_handler()
627 ib_enum_all_roce_netdevs(work->cmds[i].filter, in netdevice_event_work_handler()
628 work->cmds[i].filter_ndev, in netdevice_event_work_handler()
629 work->cmds[i].cb, in netdevice_event_work_handler()
630 work->cmds[i].ndev); in netdevice_event_work_handler()
631 dev_put(work->cmds[i].ndev); in netdevice_event_work_handler()
632 dev_put(work->cmds[i].filter_ndev); in netdevice_event_work_handler()
638 static int netdevice_queue_work(struct netdev_event_work_cmd *cmds, in netdevice_queue_work() argument
648 memcpy(ndev_work->cmds, cmds, sizeof(ndev_work->cmds)); in netdevice_queue_work()
[all …]
/drivers/gpu/drm/i915/gt/
gen7_renderclear.c
382 struct batch_chunk cmds, state; in emit_batch() local
386 batch_init(&cmds, vma, start, 0, bv->state_start); in emit_batch()
396 gen7_emit_pipeline_flush(&cmds); in emit_batch()
397 gen7_emit_pipeline_invalidate(&cmds); in emit_batch()
398 batch_add(&cmds, MI_LOAD_REGISTER_IMM(2)); in emit_batch()
399 batch_add(&cmds, i915_mmio_reg_offset(CACHE_MODE_0_GEN7)); in emit_batch()
400 batch_add(&cmds, 0xffff0000 | in emit_batch()
404 batch_add(&cmds, i915_mmio_reg_offset(CACHE_MODE_1)); in emit_batch()
405 batch_add(&cmds, 0xffff0000 | PIXEL_SUBSPAN_COLLECT_OPT_DISABLE); in emit_batch()
406 gen7_emit_pipeline_invalidate(&cmds); in emit_batch()
[all …]
/drivers/gpu/drm/vmwgfx/
vmwgfx_overlay.c
111 } *cmds; in vmw_overlay_send_put() local
123 fifo_size = sizeof(*cmds) + sizeof(*flush) + sizeof(*items) * num_items; in vmw_overlay_send_put()
125 cmds = VMW_CMD_RESERVE(dev_priv, fifo_size); in vmw_overlay_send_put()
127 if (!cmds) in vmw_overlay_send_put()
130 items = (typeof(items))&cmds[1]; in vmw_overlay_send_put()
134 fill_escape(&cmds->escape, sizeof(*items) * (num_items + 1)); in vmw_overlay_send_put()
136 cmds->header.cmdType = SVGA_ESCAPE_VMWARE_VIDEO_SET_REGS; in vmw_overlay_send_put()
137 cmds->header.streamId = arg->stream_id; in vmw_overlay_send_put()
191 } *cmds; in vmw_overlay_send_stop() local
195 cmds = VMW_CMD_RESERVE(dev_priv, sizeof(*cmds)); in vmw_overlay_send_stop()
[all …]
/drivers/gpu/drm/i915/gt/uc/
intel_guc_ct.c
147 u32 *cmds, u32 size_in_bytes, u32 resv_space) in guc_ct_buffer_init() argument
152 ctb->cmds = cmds; in guc_ct_buffer_init()
237 u32 *cmds; in intel_guc_ct_init() local
258 cmds = blob + 2 * CTB_DESC_SIZE; in intel_guc_ct_init()
262 ptrdiff(desc, blob), ptrdiff(cmds, blob), cmds_size, in intel_guc_ct_init()
265 guc_ct_buffer_init(&ct->ctbs.send, desc, cmds, cmds_size, resv_space); in intel_guc_ct_init()
269 cmds = blob + 2 * CTB_DESC_SIZE + CTB_H2G_BUFFER_SIZE; in intel_guc_ct_init()
273 ptrdiff(desc, blob), ptrdiff(cmds, blob), cmds_size, in intel_guc_ct_init()
276 guc_ct_buffer_init(&ct->ctbs.recv, desc, cmds, cmds_size, resv_space); in intel_guc_ct_init()
305 u32 base, desc, cmds; in intel_guc_ct_enable() local
[all …]
/drivers/net/wireless/intel/iwlwifi/fw/
notif-wait.c
47 if (w->cmds[i] == rec_id || in iwl_notification_wait()
48 (!iwl_cmd_groupid(w->cmds[i]) && in iwl_notification_wait()
49 DEF_ID(w->cmds[i]) == rec_id)) { in iwl_notification_wait()
85 const u16 *cmds, int n_cmds, in iwl_init_notification_wait() argument
96 memcpy(wait_entry->cmds, cmds, n_cmds * sizeof(u16)); in iwl_init_notification_wait()
notif-wait.h
52 u16 cmds[MAX_NOTIF_CMDS]; member
82 const u16 *cmds, int n_cmds,
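
The notif-wait hits sketch a waiter that registers a short list of command IDs and is triggered when any of them arrives. Below is a simplified userspace version of that matching logic; MAX_NOTIF_CMDS and the group/DEF_ID handling seen in iwl_notification_wait() are reduced to a plain ID compare, so treat it as a sketch of the idea rather than the iwlwifi code:

#include <stdbool.h>
#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define MAX_NOTIF_CMDS 5

struct notif_wait_entry {
    uint16_t cmds[MAX_NOTIF_CMDS];
    int n_cmds;
    bool triggered;
};

static void init_notification_wait(struct notif_wait_entry *w,
                                   const uint16_t *cmds, int n_cmds)
{
    if (n_cmds > MAX_NOTIF_CMDS)
        n_cmds = MAX_NOTIF_CMDS;
    memcpy(w->cmds, cmds, n_cmds * sizeof(uint16_t));
    w->n_cmds = n_cmds;
    w->triggered = false;
}

/* Called when a notification arrives: mark the waiter if the received
 * command ID is one it registered for. */
static void notification_received(struct notif_wait_entry *w, uint16_t rec_id)
{
    for (int i = 0; i < w->n_cmds; i++) {
        if (w->cmds[i] == rec_id) {
            w->triggered = true;
            break;
        }
    }
}

int main(void)
{
    const uint16_t wanted[] = { 0x88, 0xc0 };
    struct notif_wait_entry w;

    init_notification_wait(&w, wanted, 2);
    notification_received(&w, 0xc0);
    printf("triggered: %d\n", w.triggered);
    return 0;
}
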
/drivers/dma/
bcm-sba-raid.c
123 struct brcm_sba_command cmds[]; member
564 struct brcm_sba_command *cmds, in sba_fillup_interrupt_msg() argument
570 struct brcm_sba_command *cmdsp = cmds; in sba_fillup_interrupt_msg()
616 msg->sba.cmds = cmds; in sba_fillup_interrupt_msg()
617 msg->sba.cmds_count = cmdsp - cmds; in sba_fillup_interrupt_msg()
640 sba_fillup_interrupt_msg(req, req->cmds, &req->msg); in sba_prep_dma_interrupt()
650 struct brcm_sba_command *cmds, in sba_fillup_memcpy_msg() argument
658 struct brcm_sba_command *cmdsp = cmds; in sba_fillup_memcpy_msg()
704 msg->sba.cmds = cmds; in sba_fillup_memcpy_msg()
705 msg->sba.cmds_count = cmdsp - cmds; in sba_fillup_memcpy_msg()
[all …]
/drivers/gpu/host1x/
job.c
66 job->cmds = num_cmdbufs ? mem : NULL; in host1x_job_alloc()
110 struct host1x_job_gather *gather = &job->cmds[job->num_cmds].gather; in host1x_job_add_gather()
123 struct host1x_job_cmd *cmd = &job->cmds[job->num_cmds]; in host1x_job_add_wait()
235 if (job->cmds[i].is_wait) in pin_job()
238 g = &job->cmds[i].gather; in pin_job()
575 if (job->cmds[i].is_wait) in copy_gathers()
578 g = &job->cmds[i].gather; in copy_gathers()
604 if (job->cmds[i].is_wait) in copy_gathers()
606 g = &job->cmds[i].gather; in copy_gathers()
653 if (job->cmds[i].is_wait) in host1x_job_pin()
[all …]
/drivers/crypto/ccp/
ccp-crypto-main.c
54 struct list_head cmds; member
110 list_for_each_entry_continue(tmp, &req_queue.cmds, entry) { in ccp_crypto_cmd_complete()
121 if (req_queue.backlog != &req_queue.cmds) { in ccp_crypto_cmd_complete()
232 list_for_each_entry(tmp, &req_queue.cmds, entry) { in ccp_crypto_enqueue_cmd()
248 if (req_queue.backlog == &req_queue.cmds) in ccp_crypto_enqueue_cmd()
254 list_add_tail(&crypto_cmd->entry, &req_queue.cmds); in ccp_crypto_enqueue_cmd()
413 INIT_LIST_HEAD(&req_queue.cmds); in ccp_crypto_init()
414 req_queue.backlog = &req_queue.cmds; in ccp_crypto_init()
/drivers/interconnect/qcom/
bcm-voter.c
251 struct tcs_cmd cmds[MAX_BCMS]; in qcom_icc_bcm_voter_commit() local
275 tcs_list_gen(voter, QCOM_ICC_BUCKET_AMC, cmds, commit_idx); in qcom_icc_bcm_voter_commit()
282 cmds, commit_idx); in qcom_icc_bcm_voter_commit()
311 tcs_list_gen(voter, QCOM_ICC_BUCKET_WAKE, cmds, commit_idx); in qcom_icc_bcm_voter_commit()
313 ret = rpmh_write_batch(voter->dev, RPMH_WAKE_ONLY_STATE, cmds, commit_idx); in qcom_icc_bcm_voter_commit()
319 tcs_list_gen(voter, QCOM_ICC_BUCKET_SLEEP, cmds, commit_idx); in qcom_icc_bcm_voter_commit()
321 ret = rpmh_write_batch(voter->dev, RPMH_SLEEP_STATE, cmds, commit_idx); in qcom_icc_bcm_voter_commit()
/drivers/media/platform/vsp1/
vsp1_dl.c
160 struct vsp1_dl_ext_cmd *cmds; member
439 pool->cmds = kcalloc(num_cmds, sizeof(*pool->cmds), GFP_KERNEL); in vsp1_dl_cmd_pool_create()
440 if (!pool->cmds) { in vsp1_dl_cmd_pool_create()
453 kfree(pool->cmds); in vsp1_dl_cmd_pool_create()
459 struct vsp1_dl_ext_cmd *cmd = &pool->cmds[i]; in vsp1_dl_cmd_pool_create()
473 cmd->cmds = pool->mem + cmd_offset; in vsp1_dl_cmd_pool_create()
528 kfree(pool->cmds); in vsp1_dl_ext_cmd_pool_destroy()
745 cmd->cmds[0].opcode = cmd->opcode; in vsp1_dl_ext_cmd_fill_header()
746 cmd->cmds[0].flags = cmd->flags; in vsp1_dl_ext_cmd_fill_header()
747 cmd->cmds[0].address_set = cmd->data_dma; in vsp1_dl_ext_cmd_fill_header()
[all …]
/drivers/iio/adc/
ti-adc108s102.c
117 unsigned int bit, cmds; in adc108s102_update_scan_mode() local
123 cmds = 0; in adc108s102_update_scan_mode()
125 st->tx_buf[cmds++] = cpu_to_be16(ADC108S102_CMD(bit)); in adc108s102_update_scan_mode()
128 st->tx_buf[cmds++] = 0x00; in adc108s102_update_scan_mode()
133 st->ring_xfer.len = cmds * sizeof(st->tx_buf[0]); in adc108s102_update_scan_mode()
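
adc108s102_update_scan_mode() builds the SPI TX buffer from the active channel bitmask: one command word per enabled channel plus a trailing dummy word, with the transfer length derived from the final count. A standalone sketch of that loop, where ADC_CMD(), MAX_CHANNELS and the mask value are stand-ins for the driver's macros and scan mask:

#include <stdint.h>
#include <stdio.h>

#define MAX_CHANNELS 8
#define ADC_CMD(ch) ((uint16_t)((ch) << 11))   /* illustrative encoding */

static uint16_t cpu_to_be16_(uint16_t v)
{
    return (uint16_t)((v << 8) | (v >> 8));
}

int main(void)
{
    uint16_t tx_buf[MAX_CHANNELS + 1];
    unsigned long scan_mask = 0x0b; /* channels 0, 1 and 3 enabled */
    unsigned int cmds = 0;

    /* One command word per enabled channel... */
    for (unsigned int bit = 0; bit < MAX_CHANNELS; bit++)
        if (scan_mask & (1UL << bit))
            tx_buf[cmds++] = cpu_to_be16_(ADC_CMD(bit));

    /* ...plus one dummy word so the last conversion gets clocked out. */
    tx_buf[cmds++] = 0x00;

    size_t xfer_len = cmds * sizeof(tx_buf[0]);
    printf("%u command words, %zu byte transfer\n", cmds, xfer_len);
    return 0;
}
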
/drivers/nvme/target/
rdma.c
101 struct nvmet_rdma_cmd *cmds; member
126 struct nvmet_rdma_cmd *cmds; member
371 struct nvmet_rdma_cmd *cmds; in nvmet_rdma_alloc_cmds() local
374 cmds = kcalloc(nr_cmds, sizeof(struct nvmet_rdma_cmd), GFP_KERNEL); in nvmet_rdma_alloc_cmds()
375 if (!cmds) in nvmet_rdma_alloc_cmds()
379 ret = nvmet_rdma_alloc_cmd(ndev, cmds + i, admin); in nvmet_rdma_alloc_cmds()
384 return cmds; in nvmet_rdma_alloc_cmds()
388 nvmet_rdma_free_cmd(ndev, cmds + i, admin); in nvmet_rdma_alloc_cmds()
389 kfree(cmds); in nvmet_rdma_alloc_cmds()
395 struct nvmet_rdma_cmd *cmds, int nr_cmds, bool admin) in nvmet_rdma_free_cmds() argument
[all …]
tcp.c
112 struct nvmet_tcp_cmd *cmds; member
179 return cmd - queue->cmds; in nvmet_tcp_cmd_tag()
943 cmd = &queue->cmds[data->ttag]; in nvmet_tcp_handle_h2c_data_pdu()
1380 struct nvmet_tcp_cmd *cmds; in nvmet_tcp_alloc_cmds() local
1383 cmds = kcalloc(nr_cmds, sizeof(struct nvmet_tcp_cmd), GFP_KERNEL); in nvmet_tcp_alloc_cmds()
1384 if (!cmds) in nvmet_tcp_alloc_cmds()
1388 ret = nvmet_tcp_alloc_cmd(queue, cmds + i); in nvmet_tcp_alloc_cmds()
1393 queue->cmds = cmds; in nvmet_tcp_alloc_cmds()
1398 nvmet_tcp_free_cmd(cmds + i); in nvmet_tcp_alloc_cmds()
1399 kfree(cmds); in nvmet_tcp_alloc_cmds()
[all …]
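
Both nvmet transports allocate their per-queue command array with kcalloc() and then initialize each element, unwinding only the elements that succeeded if one fails. A hedged userspace sketch of that allocate/init/unwind shape; alloc_one_cmd() and free_one_cmd() are hypothetical helpers standing in for nvmet_rdma_alloc_cmd()/nvmet_rdma_free_cmd():

#include <stdlib.h>
#include <stdio.h>

struct cmd {
    void *buf;
};

static int alloc_one_cmd(struct cmd *c)
{
    c->buf = malloc(64);
    return c->buf ? 0 : -1;
}

static void free_one_cmd(struct cmd *c)
{
    free(c->buf);
}

static struct cmd *alloc_cmds(int nr_cmds)
{
    struct cmd *cmds = calloc(nr_cmds, sizeof(*cmds));
    int i;

    if (!cmds)
        return NULL;

    for (i = 0; i < nr_cmds; i++)
        if (alloc_one_cmd(&cmds[i]))
            goto out_free;
    return cmds;

out_free:
    /* Only undo the elements that were successfully set up. */
    while (--i >= 0)
        free_one_cmd(&cmds[i]);
    free(cmds);
    return NULL;
}

int main(void)
{
    struct cmd *cmds = alloc_cmds(16);

    printf("cmds %s\n", cmds ? "allocated" : "failed");
    if (cmds) {
        for (int i = 0; i < 16; i++)
            free_one_cmd(&cmds[i]);
        free(cmds);
    }
    return 0;
}
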
/drivers/mailbox/
bcm-flexrm-mailbox.c
719 if (!msg->sba.cmds || !msg->sba.cmds_count) in flexrm_sba_sanity_check()
723 if (((msg->sba.cmds[i].flags & BRCM_SBA_CMD_TYPE_B) || in flexrm_sba_sanity_check()
724 (msg->sba.cmds[i].flags & BRCM_SBA_CMD_TYPE_C)) && in flexrm_sba_sanity_check()
725 (msg->sba.cmds[i].flags & BRCM_SBA_CMD_HAS_OUTPUT)) in flexrm_sba_sanity_check()
727 if ((msg->sba.cmds[i].flags & BRCM_SBA_CMD_TYPE_B) && in flexrm_sba_sanity_check()
728 (msg->sba.cmds[i].data_len > SRCT_LENGTH_MASK)) in flexrm_sba_sanity_check()
730 if ((msg->sba.cmds[i].flags & BRCM_SBA_CMD_TYPE_C) && in flexrm_sba_sanity_check()
731 (msg->sba.cmds[i].data_len > SRCT_LENGTH_MASK)) in flexrm_sba_sanity_check()
733 if ((msg->sba.cmds[i].flags & BRCM_SBA_CMD_HAS_RESP) && in flexrm_sba_sanity_check()
734 (msg->sba.cmds[i].resp_len > DSTT_LENGTH_MASK)) in flexrm_sba_sanity_check()
[all …]
/drivers/net/ethernet/mellanox/mlx5/core/
fs_core.c
473 err = root->cmds->destroy_flow_table(root, ft); in del_hw_flow_table()
507 err = root->cmds->update_fte(root, ft, fg, fte->modify_mask, fte); in modify_fte()
571 err = root->cmds->delete_fte(root, ft, fte); in del_hw_fte()
611 if (fg->node.active && root->cmds->destroy_flow_group(root, ft, fg)) in del_hw_flow_group()
896 err = root->cmds->modify_flow_table(root, iter, ft); in connect_fts_in_prio()
977 err = root->cmds->update_root_ft(root, ft, qpn, false); in update_root_ft_create()
981 err = root->cmds->update_root_ft(root, ft, in update_root_ft_create()
1017 err = root->cmds->update_fte(root, ft, fg, in _mlx5_modify_rule_destination()
1173 err = root->cmds->create_flow_table(root, ft, ft_attr->max_fte, next_ft); in __mlx5_create_flow_table()
1197 root->cmds->destroy_flow_table(root, ft); in __mlx5_create_flow_table()
[all …]
/drivers/iommu/arm/arm-smmu-v3/
arm-smmu-v3.c
716 static void arm_smmu_cmdq_write_entries(struct arm_smmu_cmdq *cmdq, u64 *cmds, in arm_smmu_cmdq_write_entries() argument
726 u64 *cmd = &cmds[i * CMDQ_ENT_DWORDS]; in arm_smmu_cmdq_write_entries()
750 u64 *cmds, int n, bool sync) in arm_smmu_cmdq_issue_cmdlist() argument
793 arm_smmu_cmdq_write_entries(cmdq, cmds, llq.prod, n); in arm_smmu_cmdq_issue_cmdlist()
897 struct arm_smmu_cmdq_batch *cmds, in arm_smmu_cmdq_batch_add() argument
900 if (cmds->num == CMDQ_BATCH_ENTRIES - 1 && in arm_smmu_cmdq_batch_add()
902 arm_smmu_cmdq_issue_cmdlist(smmu, cmds->cmds, cmds->num, true); in arm_smmu_cmdq_batch_add()
903 cmds->num = 0; in arm_smmu_cmdq_batch_add()
906 if (cmds->num == CMDQ_BATCH_ENTRIES) { in arm_smmu_cmdq_batch_add()
907 arm_smmu_cmdq_issue_cmdlist(smmu, cmds->cmds, cmds->num, false); in arm_smmu_cmdq_batch_add()
[all …]
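
arm_smmu_cmdq_batch_add() accumulates commands into a fixed-size batch and issues the whole list to the command queue once the batch fills, with a final submit for whatever remains. The sketch below shows just that batching logic in plain C; the entry size, batch size and issue_cmdlist() are illustrative placeholders, not the SMMUv3 driver's definitions:

#include <stdio.h>
#include <stdint.h>
#include <string.h>

#define ENT_DWORDS 2
#define BATCH_ENTRIES 8

struct cmd_batch {
    uint64_t cmds[BATCH_ENTRIES * ENT_DWORDS];
    int num;
};

static void issue_cmdlist(const uint64_t *cmds, int n)
{
    printf("issuing %d command(s)\n", n);
}

/* Flush the batch when it is full, then append the new entry. */
static void batch_add(struct cmd_batch *b, const uint64_t *ent)
{
    if (b->num == BATCH_ENTRIES) {
        issue_cmdlist(b->cmds, b->num);
        b->num = 0;
    }
    memcpy(&b->cmds[b->num * ENT_DWORDS], ent, ENT_DWORDS * sizeof(uint64_t));
    b->num++;
}

/* Submit whatever is left in the batch. */
static void batch_submit(struct cmd_batch *b)
{
    if (b->num)
        issue_cmdlist(b->cmds, b->num);
    b->num = 0;
}

int main(void)
{
    struct cmd_batch b = { .num = 0 };
    uint64_t ent[ENT_DWORDS] = { 0x1, 0x0 };

    for (int i = 0; i < 20; i++)
        batch_add(&b, ent);
    batch_submit(&b);
    return 0;
}
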
/drivers/i3c/master/
dw-i3c-master.c
224 struct dw_i3c_cmd cmds[]; member
362 xfer = kzalloc(struct_size(xfer, cmds, ncmds), GFP_KERNEL); in dw_i3c_master_alloc_xfer()
388 struct dw_i3c_cmd *cmd = &xfer->cmds[i]; in dw_i3c_master_start_xfer_locked()
399 struct dw_i3c_cmd *cmd = &xfer->cmds[i]; in dw_i3c_master_start_xfer_locked()
469 cmd = &xfer->cmds[RESPONSE_PORT_TID(resp)]; in dw_i3c_master_end_xfer_locked()
478 switch (xfer->cmds[i].error) { in dw_i3c_master_end_xfer_locked()
678 cmd = xfer->cmds; in dw_i3c_ccc_set()
696 if (xfer->cmds[0].error == RESPONSE_ERROR_IBA_NACK) in dw_i3c_ccc_set()
718 cmd = xfer->cmds; in dw_i3c_ccc_get()
737 if (xfer->cmds[0].error == RESPONSE_ERROR_IBA_NACK) in dw_i3c_ccc_get()
[all …]
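
dw_i3c_master_alloc_xfer() sizes a single allocation to cover the transfer header plus its trailing cmds[] flexible array, via struct_size(). A userspace equivalent of that allocation pattern, with struct_size() open-coded and the structure fields trimmed down to illustrative ones:

#include <stdlib.h>
#include <stdio.h>

struct cmd {
    int error;
    int rx_len;
};

struct xfer {
    int ncmds;
    struct cmd cmds[];  /* flexible array member */
};

static struct xfer *alloc_xfer(int ncmds)
{
    /* Userspace stand-in for kzalloc(struct_size(xfer, cmds, ncmds), GFP_KERNEL). */
    struct xfer *xfer = calloc(1, sizeof(*xfer) + ncmds * sizeof(struct cmd));

    if (xfer)
        xfer->ncmds = ncmds;
    return xfer;
}

int main(void)
{
    struct xfer *xfer = alloc_xfer(4);

    if (!xfer)
        return 1;
    xfer->cmds[0].error = 0;
    printf("allocated xfer with %d cmd slot(s)\n", xfer->ncmds);
    free(xfer);
    return 0;
}
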
/drivers/gpu/drm/tegra/
submit.c
373 struct drm_tegra_submit_cmd *cmds; in submit_create_job() local
381 cmds = alloc_copy_user_array(u64_to_user_ptr(args->cmds_ptr), args->num_cmds, in submit_create_job()
382 sizeof(*cmds)); in submit_create_job()
383 if (IS_ERR(cmds)) { in submit_create_job()
385 return ERR_CAST(cmds); in submit_create_job()
404 struct drm_tegra_submit_cmd *cmd = &cmds[i]; in submit_create_job()
461 kvfree(cmds); in submit_create_job()
/drivers/soc/qcom/
rpmh.c
31 .cmds = name.cmd, \
90 rpm_msg->msg.cmds[0].addr, r); in rpmh_tx_done()
186 req = cache_rpm_request(ctrlr, state, &rpm_msg->msg.cmds[i]); in __rpmh_write()
212 req->msg.cmds = req->cmd; in __fill_rpmh_msg()
379 ret, rpm_msgs[i].msg.cmds[0].addr); in rpmh_write_batch()
trace-rpmh.h
32 __entry->addr = r->cmds[0].addr;
33 __entry->data = r->cmds[0].data;
/drivers/gpu/drm/msm/adreno/
a5xx_power.c
330 unsigned int *data, *ptr, *cmds; in a5xx_gpmu_ucode_init() local
356 cmds = data + data[2] + 3; in a5xx_gpmu_ucode_init()
382 ptr[dwords++] = *cmds++; in a5xx_gpmu_ucode_init()
/drivers/input/touchscreen/
ads7846.c
84 unsigned int cmds; member
781 for (cmd_idx = packet->last_cmd_idx; cmd_idx < packet->cmds - 1; cmd_idx++) { in ads7846_filter()
1063 packet->cmds = 5; /* x, y, z1, z2, pwdown */ in ads7846_setup_spi_msg()
1065 packet->cmds = 3; /* x, y, pwdown */ in ads7846_setup_spi_msg()
1067 for (cmd_idx = 0; cmd_idx < packet->cmds; cmd_idx++) { in ads7846_setup_spi_msg()
1071 if (cmd_idx == packet->cmds - 1) in ads7846_setup_spi_msg()
1108 for (cmd_idx = 0; cmd_idx < packet->cmds; cmd_idx++) { in ads7846_setup_spi_msg()
1112 if (cmd_idx == packet->cmds - 1) in ads7846_setup_spi_msg()
/drivers/gpu/host1x/hw/
debug_hw.c
212 if (job->cmds[i].is_wait) in show_channel_gathers()
215 g = &job->cmds[i].gather; in show_channel_gathers()
