
Searched refs:ret_code (Results 1 – 25 of 47) sorted by relevance


/kernel/linux/linux-5.10/drivers/infiniband/hw/i40iw/
i40iw_virtchnl.c
52 enum i40iw_status_code ret_code = I40IW_ERR_NOT_READY; in vchnl_vf_send_get_ver_req() local
56 return ret_code; in vchnl_vf_send_get_ver_req()
63 ret_code = dev->vchnl_if.vchnl_send(dev, 0, (u8 *)vchnl_msg, vchnl_msg->iw_chnl_buf_len); in vchnl_vf_send_get_ver_req()
64 if (ret_code) in vchnl_vf_send_get_ver_req()
66 "%s: virt channel send failed 0x%x\n", __func__, ret_code); in vchnl_vf_send_get_ver_req()
67 return ret_code; in vchnl_vf_send_get_ver_req()
78 enum i40iw_status_code ret_code = I40IW_ERR_NOT_READY; in vchnl_vf_send_get_hmc_fcn_req() local
82 return ret_code; in vchnl_vf_send_get_hmc_fcn_req()
89 ret_code = dev->vchnl_if.vchnl_send(dev, 0, (u8 *)vchnl_msg, vchnl_msg->iw_chnl_buf_len); in vchnl_vf_send_get_hmc_fcn_req()
90 if (ret_code) in vchnl_vf_send_get_hmc_fcn_req()
[all …]
i40iw_hmc.c
185 enum i40iw_status_code ret_code = 0; in i40iw_hmc_sd_grp() local
207 ret_code = dev->cqp->process_cqp_sds(dev, &sdinfo); in i40iw_hmc_sd_grp()
208 if (ret_code) { in i40iw_hmc_sd_grp()
211 ret_code); in i40iw_hmc_sd_grp()
212 return ret_code; in i40iw_hmc_sd_grp()
218 ret_code = dev->cqp->process_cqp_sds(dev, &sdinfo); in i40iw_hmc_sd_grp()
220 return ret_code; in i40iw_hmc_sd_grp()
304 enum i40iw_status_code ret_code = 0; in i40iw_sc_create_hmc_obj() local
332 ret_code = i40iw_add_sd_table_entry(dev->hw, info->hmc_info, in i40iw_sc_create_hmc_obj()
336 if (ret_code) in i40iw_sc_create_hmc_obj()
[all …]
i40iw_uk.c
145 enum i40iw_status_code ret_code = 0; in i40iw_qp_get_next_send_wqe() local
159 I40IW_RING_MOVE_HEAD(qp->sq_ring, ret_code); in i40iw_qp_get_next_send_wqe()
160 if (ret_code) in i40iw_qp_get_next_send_wqe()
171 I40IW_RING_MOVE_HEAD(qp->sq_ring, ret_code); in i40iw_qp_get_next_send_wqe()
172 if (ret_code) in i40iw_qp_get_next_send_wqe()
179 wqe_size / I40IW_QP_WQE_MIN_SIZE, ret_code); in i40iw_qp_get_next_send_wqe()
180 if (ret_code) in i40iw_qp_get_next_send_wqe()
223 enum i40iw_status_code ret_code; in i40iw_qp_get_next_recv_wqe() local
228 I40IW_ATOMIC_RING_MOVE_HEAD(qp->rq_ring, *wqe_idx, ret_code); in i40iw_qp_get_next_recv_wqe()
229 if (ret_code) in i40iw_qp_get_next_recv_wqe()
[all …]
i40iw_ctrl.c
509 enum i40iw_status_code ret_code; in i40iw_sc_cqp_create() local
514 ret_code = i40iw_allocate_dma_mem(cqp->dev->hw, in i40iw_sc_cqp_create()
519 if (ret_code) in i40iw_sc_cqp_create()
552 ret_code = I40IW_ERR_TIMEOUT; in i40iw_sc_cqp_create()
573 if (!ret_code) in i40iw_sc_cqp_create()
575 return ret_code; in i40iw_sc_cqp_create()
608 enum i40iw_status_code ret_code; in i40iw_sc_cqp_get_next_send_wqe_idx() local
620 I40IW_ATOMIC_RING_MOVE_HEAD(cqp->sq_ring, *wqe_idx, ret_code); in i40iw_sc_cqp_get_next_send_wqe_idx()
622 if (ret_code) in i40iw_sc_cqp_get_next_send_wqe_idx()
653 enum i40iw_status_code ret_code = 0; in i40iw_sc_cqp_destroy() local
[all …]
i40iw_pble.c
136 enum i40iw_status_code ret_code = 0; in add_sd_direct() local
145 ret_code = i40iw_add_sd_table_entry(dev->hw, hmc_info, in add_sd_direct()
149 if (ret_code) in add_sd_direct()
150 return ret_code; in add_sd_direct()
332 enum i40iw_status_code ret_code = 0; in add_pble_pool() local
371 ret_code = add_sd_direct(dev, pble_rsrc, &info); in add_pble_pool()
372 if (ret_code) in add_pble_pool()
378 ret_code = add_bp_pages(dev, pble_rsrc, &info); in add_pble_pool()
379 if (ret_code) in add_pble_pool()
388 ret_code = I40IW_ERR_NO_MEMORY; in add_pble_pool()
[all …]
i40iw_virtchnl.h
101 s16 ret_code; member
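
The i40iw hits above all share one shape: a local status code starts at a pessimistic value such as I40IW_ERR_NOT_READY, preconditions return it early, and the result of the low-level call is logged and then propagated unchanged to the caller. Below is a minimal, self-contained C sketch of that shape; the demo_* names, the enum values, and the stub send callback are hypothetical stand-ins for illustration, not the driver's real API.

#include <stdio.h>

/* Illustrative status codes modeled on the i40iw_status_code style. */
enum demo_status_code {
        DEMO_SUCCESS       = 0,
        DEMO_ERR_NOT_READY = -1,
        DEMO_ERR_SEND      = -2,
};

struct demo_dev {
        int ready;
        /* Low-level channel send hook, analogous to vchnl_if.vchnl_send. */
        enum demo_status_code (*send)(struct demo_dev *dev,
                                      const void *buf, unsigned int len);
};

/* Same shape as the vchnl_vf_send_*_req() hits: pessimistic default,
 * early return on a missing precondition, log-and-propagate on failure. */
static enum demo_status_code demo_send_get_ver_req(struct demo_dev *dev)
{
        enum demo_status_code ret_code = DEMO_ERR_NOT_READY;
        const char msg[] = "GET_VER";

        if (!dev->ready)
                return ret_code;

        ret_code = dev->send(dev, msg, sizeof(msg));
        if (ret_code)
                fprintf(stderr, "%s: channel send failed 0x%x\n",
                        __func__, (unsigned int)ret_code);
        return ret_code;
}

static enum demo_status_code stub_send(struct demo_dev *dev,
                                       const void *buf, unsigned int len)
{
        (void)dev; (void)buf; (void)len;
        return DEMO_SUCCESS;
}

int main(void)
{
        struct demo_dev dev = { .ready = 1, .send = stub_send };

        return demo_send_get_ver_req(&dev) == DEMO_SUCCESS ? 0 : 1;
}

Keeping the variable pessimistic until the call succeeds means every early exit already carries a meaningful error, which is why the hits show so many bare "return ret_code;" lines.
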
/kernel/linux/linux-5.10/drivers/net/ethernet/intel/iavf/
iavf_adminq.c
37 enum iavf_status ret_code; in iavf_alloc_adminq_asq_ring() local
39 ret_code = iavf_allocate_dma_mem(hw, &hw->aq.asq.desc_buf, in iavf_alloc_adminq_asq_ring()
44 if (ret_code) in iavf_alloc_adminq_asq_ring()
45 return ret_code; in iavf_alloc_adminq_asq_ring()
47 ret_code = iavf_allocate_virt_mem(hw, &hw->aq.asq.cmd_buf, in iavf_alloc_adminq_asq_ring()
50 if (ret_code) { in iavf_alloc_adminq_asq_ring()
52 return ret_code; in iavf_alloc_adminq_asq_ring()
55 return ret_code; in iavf_alloc_adminq_asq_ring()
64 enum iavf_status ret_code; in iavf_alloc_adminq_arq_ring() local
66 ret_code = iavf_allocate_dma_mem(hw, &hw->aq.arq.desc_buf, in iavf_alloc_adminq_arq_ring()
[all …]
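
The iavf (and matching i40e) admin-queue hits show a two-stage allocation: the descriptor ring is allocated first, the command-buffer array second, and a failure in the second step has to release the first before ret_code is returned. A small userspace sketch of that cleanup-on-partial-failure pattern follows; the demo_* names and calloc/free stand in for the driver's DMA and virtual-memory allocators and are assumptions, not the real interfaces.

#include <stdlib.h>

/* Illustrative status type; the real drivers use enum iavf_status / i40e_status. */
typedef int demo_status;
#define DEMO_OK      0
#define DEMO_ERR_MEM 1

struct demo_ring {
        void *desc_buf;  /* stands in for the DMA descriptor ring   */
        void *cmd_buf;   /* stands in for the command details array */
};

/* Two-stage allocation in the style of iavf_alloc_adminq_asq_ring(). */
static demo_status demo_alloc_asq_ring(struct demo_ring *ring,
                                       size_t ndesc, size_t desc_size)
{
        demo_status ret_code;

        /* Stage 1: descriptor ring (DMA memory in the real driver). */
        ring->desc_buf = calloc(ndesc, desc_size);
        ret_code = ring->desc_buf ? DEMO_OK : DEMO_ERR_MEM;
        if (ret_code)
                return ret_code;

        /* Stage 2: command-buffer array (virtual memory in the real driver).
         * On failure, undo stage 1 before handing the status back. */
        ring->cmd_buf = calloc(ndesc, sizeof(void *));
        ret_code = ring->cmd_buf ? DEMO_OK : DEMO_ERR_MEM;
        if (ret_code) {
                free(ring->desc_buf);
                ring->desc_buf = NULL;
        }

        return ret_code;
}

int main(void)
{
        struct demo_ring ring = { 0 };

        if (demo_alloc_asq_ring(&ring, 32, 64) != DEMO_OK)
                return 1;
        free(ring.cmd_buf);
        free(ring.desc_buf);
        return 0;
}

Funnelling both stages through one ret_code means the caller only ever sees a fully built ring or an error, never a half-initialized one.
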
/kernel/linux/linux-5.10/drivers/net/ethernet/intel/i40e/
i40e_hmc.c
30 i40e_status ret_code = I40E_SUCCESS; in i40e_add_sd_table_entry() local
34 ret_code = I40E_ERR_BAD_PTR; in i40e_add_sd_table_entry()
40 ret_code = I40E_ERR_INVALID_SD_INDEX; in i40e_add_sd_table_entry()
56 ret_code = i40e_allocate_dma_mem(hw, &mem, mem_type, alloc_len, in i40e_add_sd_table_entry()
58 if (ret_code) in i40e_add_sd_table_entry()
62 ret_code = i40e_allocate_virt_mem(hw, in i40e_add_sd_table_entry()
65 if (ret_code) in i40e_add_sd_table_entry()
85 if (ret_code) in i40e_add_sd_table_entry()
89 return ret_code; in i40e_add_sd_table_entry()
114 i40e_status ret_code = 0; in i40e_add_pd_table_entry() local
[all …]
i40e_lan_hmc.c
82 i40e_status ret_code = 0; in i40e_init_lan_hmc() local
90 ret_code = i40e_allocate_virt_mem(hw, &hw->hmc.hmc_obj_virt_mem, in i40e_init_lan_hmc()
92 if (ret_code) in i40e_init_lan_hmc()
114 ret_code = I40E_ERR_INVALID_HMC_OBJ_COUNT; in i40e_init_lan_hmc()
116 txq_num, obj->max_cnt, ret_code); in i40e_init_lan_hmc()
137 ret_code = I40E_ERR_INVALID_HMC_OBJ_COUNT; in i40e_init_lan_hmc()
139 rxq_num, obj->max_cnt, ret_code); in i40e_init_lan_hmc()
160 ret_code = I40E_ERR_INVALID_HMC_OBJ_COUNT; in i40e_init_lan_hmc()
162 fcoe_cntx_num, obj->max_cnt, ret_code); in i40e_init_lan_hmc()
183 ret_code = I40E_ERR_INVALID_HMC_OBJ_COUNT; in i40e_init_lan_hmc()
[all …]
i40e_nvm.c
19 i40e_status ret_code = 0; in i40e_init_nvm() local
40 ret_code = I40E_ERR_NVM_BLANK_MODE; in i40e_init_nvm()
44 return ret_code; in i40e_init_nvm()
58 i40e_status ret_code = 0; in i40e_acquire_nvm() local
65 ret_code = i40e_aq_request_resource(hw, I40E_NVM_RESOURCE_ID, access, in i40e_acquire_nvm()
73 if (ret_code) in i40e_acquire_nvm()
76 access, time_left, ret_code, hw->aq.asq_last_status); in i40e_acquire_nvm()
78 if (ret_code && time_left) { in i40e_acquire_nvm()
84 ret_code = i40e_aq_request_resource(hw, in i40e_acquire_nvm()
88 if (!ret_code) { in i40e_acquire_nvm()
[all …]
i40e_adminq.c
52 i40e_status ret_code; in i40e_alloc_adminq_asq_ring() local
54 ret_code = i40e_allocate_dma_mem(hw, &hw->aq.asq.desc_buf, in i40e_alloc_adminq_asq_ring()
59 if (ret_code) in i40e_alloc_adminq_asq_ring()
60 return ret_code; in i40e_alloc_adminq_asq_ring()
62 ret_code = i40e_allocate_virt_mem(hw, &hw->aq.asq.cmd_buf, in i40e_alloc_adminq_asq_ring()
65 if (ret_code) { in i40e_alloc_adminq_asq_ring()
67 return ret_code; in i40e_alloc_adminq_asq_ring()
70 return ret_code; in i40e_alloc_adminq_asq_ring()
79 i40e_status ret_code; in i40e_alloc_adminq_arq_ring() local
81 ret_code = i40e_allocate_dma_mem(hw, &hw->aq.arq.desc_buf, in i40e_alloc_adminq_arq_ring()
[all …]
i40e_diag.c
79 i40e_status ret_code = 0; in i40e_diag_reg_test() local
84 !ret_code; i++) { in i40e_diag_reg_test()
101 for (j = 0; j < i40e_reg_list[i].elements && !ret_code; j++) { in i40e_diag_reg_test()
104 ret_code = i40e_diag_reg_pattern_test(hw, reg, mask); in i40e_diag_reg_test()
108 return ret_code; in i40e_diag_reg_test()
119 i40e_status ret_code; in i40e_diag_eeprom_test() local
123 ret_code = i40e_read_nvm_word(hw, I40E_SR_NVM_CONTROL_WORD, &reg_val); in i40e_diag_eeprom_test()
124 if (!ret_code && in i40e_diag_eeprom_test()
/kernel/linux/linux-5.10/drivers/s390/cio/
vfio_ccw_fsm.c
187 private->io_region->ret_code = -EIO; in fsm_io_error()
193 private->io_region->ret_code = -EBUSY; in fsm_io_busy()
199 private->io_region->ret_code = -EAGAIN; in fsm_io_retry()
211 cmd_region->ret_code = -EIO; in fsm_async_error()
217 private->cmd_region->ret_code = -EAGAIN; in fsm_async_retry()
257 io_region->ret_code = -EOPNOTSUPP; in fsm_io_request()
265 io_region->ret_code = cp_init(&private->cp, mdev_dev(mdev), in fsm_io_request()
267 if (io_region->ret_code) { in fsm_io_request()
272 io_region->ret_code); in fsm_io_request()
277 io_region->ret_code = cp_prefetch(&private->cp); in fsm_io_request()
[all …]
vfio_ccw_async.c
60 ret = region->ret_code ? region->ret_code : count; in vfio_ccw_async_region_write()
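
In the vfio-ccw hits, ret_code is not a driver-internal status but a field of a region shared with userspace: the FSM handlers store a negative errno in it (-EIO, -EBUSY, -EAGAIN, -EOPNOTSUPP), and the region write handler returns either that error or the number of bytes written. The sketch below models that flow with a hypothetical demo_io_region; it illustrates the convention the hits show, not the actual vfio-ccw code.

#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <sys/types.h>

/* Simplified stand-in for the I/O region shared with userspace: the
 * request goes in, ret_code carries a negative errno back out. */
struct demo_io_region {
        char    request[16];
        int32_t ret_code;
};

/* Terminal-state helpers in the spirit of fsm_io_error()/fsm_io_busy():
 * each one just records the outcome in the shared region. */
static void demo_io_error(struct demo_io_region *r) { r->ret_code = -EIO; }
static void demo_io_busy(struct demo_io_region *r)  { r->ret_code = -EBUSY; }

/* Region write handler shaped like vfio_ccw_async_region_write(): report
 * the stored error if there is one, otherwise the bytes consumed. */
static ssize_t demo_region_write(struct demo_io_region *r,
                                 const char *buf, size_t count)
{
        if (count > sizeof(r->request))
                return -EINVAL;
        memcpy(r->request, buf, count);

        /* Pretend the device rejected the request as busy. */
        demo_io_busy(r);

        return r->ret_code ? r->ret_code : (ssize_t)count;
}

int main(void)
{
        struct demo_io_region region = { .ret_code = 0 };
        ssize_t ret = demo_region_write(&region, "SSCH", 4);

        printf("write returned %zd (ret_code %d)\n", ret, (int)region.ret_code);

        demo_io_error(&region);        /* e.g. a later failure in the FSM */
        printf("after error state: ret_code %d\n", (int)region.ret_code);
        return 0;
}

Returning "ret_code ? ret_code : count" lets a single write() report both outcomes: the stored errno when the request failed, the consumed byte count when it did not.
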
/kernel/linux/linux-5.10/drivers/net/ethernet/intel/ice/
ice_controlq.c
353 enum ice_status ret_code; in ice_init_sq() local
357 ret_code = ICE_ERR_NOT_READY; in ice_init_sq()
363 ret_code = ICE_ERR_CFG; in ice_init_sq()
371 ret_code = ice_alloc_ctrlq_sq_ring(hw, cq); in ice_init_sq()
372 if (ret_code) in ice_init_sq()
376 ret_code = ice_alloc_sq_bufs(hw, cq); in ice_init_sq()
377 if (ret_code) in ice_init_sq()
381 ret_code = ice_cfg_sq_regs(hw, cq); in ice_init_sq()
382 if (ret_code) in ice_init_sq()
394 return ret_code; in ice_init_sq()
[all …]
/kernel/linux/patches/linux-5.10/prebuilts/usr/include/linux/
vfio_ccw.h
29 __u32 ret_code; member
35 __u32 ret_code; member
/kernel/linux/linux-5.10/include/uapi/linux/
vfio_ccw.h
23 __u32 ret_code; member
34 __u32 ret_code; member
/kernel/linux/linux-5.10/fs/hmdfs/comm/
socket_adapter.c
206 void *buf, __u32 ret_code) in hmdfs_sendmessage_response() argument
217 head.ret_code = cpu_to_le32(ret_code); in hmdfs_sendmessage_response()
261 if (mp->resp.ret_code == -ETIME) in async_request_cb_on_wakeup_fn()
290 mp->resp.ret_code = -ETIME; in mp_alloc()
431 head->ret_code = cpu_to_le32(sm->ret_code); in hmdfs_sendmessage_request()
531 head.ret_code = cpu_to_le32(0); in hmdfs_send_slice()
660 head.ret_code = cpu_to_le32(sm->ret_code); in hmdfs_sendpage_request()
862 __u32 ret_code, __u32 data_len, void *buf) in hmdfs_response_wakeup() argument
864 msg_info->ret = ret_code; in hmdfs_response_wakeup()
923 ret = le32_to_cpu(desp->head->ret_code); in hmdfs_file_response_work_fn()
[all …]
socket_adapter.h
60 void *buf, __u32 ret_code);
158 __u32 ret_code, __u32 data_len, void *buf);
protocol.h
32 __le32 ret_code; member
86 __u32 ret_code; member
113 __u32 ret_code; member
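
The hmdfs hits show ret_code crossing the wire inside a message header, so it is converted with cpu_to_le32() before sending and le32_to_cpu() after receiving. The userspace sketch below reimplements those two conversions by hand to make the byte-order handling explicit; the demo_head layout is hypothetical and much smaller than the real hmdfs header.

#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical on-wire header; the point is that ret_code travels as
 * little-endian (__le32) regardless of the host's byte order. */
struct demo_head {
        uint32_t data_len;   /* little-endian on the wire */
        uint32_t ret_code;   /* little-endian on the wire */
};

/* Userspace stand-ins for cpu_to_le32()/le32_to_cpu(). */
static uint32_t demo_cpu_to_le32(uint32_t v)
{
        uint8_t b[4] = { (uint8_t)v, (uint8_t)(v >> 8),
                         (uint8_t)(v >> 16), (uint8_t)(v >> 24) };
        uint32_t le;

        memcpy(&le, b, sizeof(le));
        return le;
}

static uint32_t demo_le32_to_cpu(uint32_t le)
{
        uint8_t b[4];

        memcpy(b, &le, sizeof(le));
        return (uint32_t)b[0] | (uint32_t)b[1] << 8 |
               (uint32_t)b[2] << 16 | (uint32_t)b[3] << 24;
}

int main(void)
{
        struct demo_head head;
        int32_t status = -ETIME;   /* the value the async-timeout hits store */

        /* Sender side: convert before the header goes on the wire. */
        head.data_len = demo_cpu_to_le32(128);
        head.ret_code = demo_cpu_to_le32((uint32_t)status);

        /* Receiver side: convert back before interpreting the value. */
        printf("ret_code on the receiver: %d\n",
               (int32_t)demo_le32_to_cpu(head.ret_code));
        return 0;
}
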
/kernel/linux/linux-5.10/sound/soc/intel/atom/sst/
sst_pvt.c
89 if (block->ret_code < 0) { in sst_wait_interruptible()
91 "stream failed %d\n", block->ret_code); in sst_wait_interruptible()
133 block->ret_code); in sst_wait_timeout()
134 retval = -block->ret_code; in sst_wait_timeout()
/kernel/linux/patches/linux-4.19/prebuilts/usr/include/linux/
vfio_ccw.h
16 __u32 ret_code; member
/kernel/linux/linux-5.10/fs/hmdfs/
hmdfs_trace.h
48 __field(__u32, ret_code)
57 __entry->ret_code = cmd->ret_code;
62 __entry->cmd_flag, __entry->data_len, __entry->ret_code)
77 __field(__u32, ret_code)
86 __entry->ret_code = cmd->ret_code;
91 __entry->cmd_flag, __entry->data_len, __entry->ret_code)
/kernel/linux/linux-5.10/include/linux/
drbd_genl_api.h
28 __s32 ret_code; member
/kernel/linux/linux-5.10/drivers/mtd/
mtdcore.c
1440 int ret_code; in mtd_read_oob() local
1444 ret_code = mtd_check_oob_ops(mtd, from, ops); in mtd_read_oob()
1445 if (ret_code) in mtd_read_oob()
1446 return ret_code; in mtd_read_oob()
1455 ret_code = mtd_io_emulated_slc(mtd, from, true, ops); in mtd_read_oob()
1457 ret_code = mtd_read_oob_std(mtd, from, ops); in mtd_read_oob()
1467 if (unlikely(ret_code < 0)) in mtd_read_oob()
1468 return ret_code; in mtd_read_oob()
1471 return ret_code >= mtd->bitflip_threshold ? -EUCLEAN : 0; in mtd_read_oob()
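
The mtdcore.c hits use ret_code differently again: on the OOB read path a negative value is a hard error, a non-negative value is the maximum number of corrected bitflips, and only the final comparison against the bitflip threshold turns it into -EUCLEAN or 0 for the caller. A compact sketch of that final decision, assuming Linux's EUCLEAN errno and a hypothetical demo_read_status() helper:

#include <errno.h>
#include <stdio.h>

/* Stand-in for the decision at the end of the mtd_read_oob() hits: a
 * negative ret_code is a hard error, a non-negative one is the maximum
 * number of corrected bitflips, and only the comparison against the
 * threshold decides between "clean" (0) and -EUCLEAN. */
static int demo_read_status(int ret_code, unsigned int bitflip_threshold)
{
        if (ret_code < 0)
                return ret_code;        /* hard error: pass it through */

        return (unsigned int)ret_code >= bitflip_threshold ? -EUCLEAN : 0;
}

int main(void)
{
        /* 3 corrected bitflips, threshold 4: still reported as clean. */
        printf("%d\n", demo_read_status(3, 4));
        /* 5 corrected bitflips, threshold 4: the caller sees -EUCLEAN. */
        printf("%d\n", demo_read_status(5, 4));
        /* A real failure is propagated unchanged. */
        printf("%d\n", demo_read_status(-EIO, 4));
        return 0;
}

Callers therefore see 0 until corrections approach the threshold, and -EUCLEAN only as a warning that correctable bitflips are getting close to the limit; a real I/O failure still arrives as its own negative errno.
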
