/drivers/infiniband/hw/i40iw/
i40iw_virtchnl.c
    52  enum i40iw_status_code ret_code = I40IW_ERR_NOT_READY;  in vchnl_vf_send_get_ver_req() local
    56  return ret_code;  in vchnl_vf_send_get_ver_req()
    63  ret_code = dev->vchnl_if.vchnl_send(dev, 0, (u8 *)vchnl_msg, vchnl_msg->iw_chnl_buf_len);  in vchnl_vf_send_get_ver_req()
    64  if (ret_code)  in vchnl_vf_send_get_ver_req()
    66  "%s: virt channel send failed 0x%x\n", __func__, ret_code);  in vchnl_vf_send_get_ver_req()
    67  return ret_code;  in vchnl_vf_send_get_ver_req()
    78  enum i40iw_status_code ret_code = I40IW_ERR_NOT_READY;  in vchnl_vf_send_get_hmc_fcn_req() local
    82  return ret_code;  in vchnl_vf_send_get_hmc_fcn_req()
    89  ret_code = dev->vchnl_if.vchnl_send(dev, 0, (u8 *)vchnl_msg, vchnl_msg->iw_chnl_buf_len);  in vchnl_vf_send_get_hmc_fcn_req()
    90  if (ret_code)  in vchnl_vf_send_get_hmc_fcn_req()
    [all …]
i40iw_hmc.c
   185  enum i40iw_status_code ret_code = 0;  in i40iw_hmc_sd_grp() local
   207  ret_code = dev->cqp->process_cqp_sds(dev, &sdinfo);  in i40iw_hmc_sd_grp()
   208  if (ret_code) {  in i40iw_hmc_sd_grp()
   211  ret_code);  in i40iw_hmc_sd_grp()
   212  return ret_code;  in i40iw_hmc_sd_grp()
   218  ret_code = dev->cqp->process_cqp_sds(dev, &sdinfo);  in i40iw_hmc_sd_grp()
   220  return ret_code;  in i40iw_hmc_sd_grp()
   304  enum i40iw_status_code ret_code = 0;  in i40iw_sc_create_hmc_obj() local
   332  ret_code = i40iw_add_sd_table_entry(dev->hw, info->hmc_info,  in i40iw_sc_create_hmc_obj()
   336  if (ret_code)  in i40iw_sc_create_hmc_obj()
   [all …]
i40iw_uk.c
   145  enum i40iw_status_code ret_code = 0;  in i40iw_qp_get_next_send_wqe() local
   159  I40IW_RING_MOVE_HEAD(qp->sq_ring, ret_code);  in i40iw_qp_get_next_send_wqe()
   160  if (ret_code)  in i40iw_qp_get_next_send_wqe()
   171  I40IW_RING_MOVE_HEAD(qp->sq_ring, ret_code);  in i40iw_qp_get_next_send_wqe()
   172  if (ret_code)  in i40iw_qp_get_next_send_wqe()
   179  wqe_size / I40IW_QP_WQE_MIN_SIZE, ret_code);  in i40iw_qp_get_next_send_wqe()
   180  if (ret_code)  in i40iw_qp_get_next_send_wqe()
   223  enum i40iw_status_code ret_code;  in i40iw_qp_get_next_recv_wqe() local
   228  I40IW_ATOMIC_RING_MOVE_HEAD(qp->rq_ring, *wqe_idx, ret_code);  in i40iw_qp_get_next_recv_wqe()
   229  if (ret_code)  in i40iw_qp_get_next_recv_wqe()
   [all …]
i40iw_ctrl.c
   509  enum i40iw_status_code ret_code;  in i40iw_sc_cqp_create() local
   514  ret_code = i40iw_allocate_dma_mem(cqp->dev->hw,  in i40iw_sc_cqp_create()
   519  if (ret_code)  in i40iw_sc_cqp_create()
   552  ret_code = I40IW_ERR_TIMEOUT;  in i40iw_sc_cqp_create()
   573  if (!ret_code)  in i40iw_sc_cqp_create()
   575  return ret_code;  in i40iw_sc_cqp_create()
   608  enum i40iw_status_code ret_code;  in i40iw_sc_cqp_get_next_send_wqe_idx() local
   620  I40IW_ATOMIC_RING_MOVE_HEAD(cqp->sq_ring, *wqe_idx, ret_code);  in i40iw_sc_cqp_get_next_send_wqe_idx()
   622  if (ret_code)  in i40iw_sc_cqp_get_next_send_wqe_idx()
   653  enum i40iw_status_code ret_code = 0;  in i40iw_sc_cqp_destroy() local
   [all …]
i40iw_pble.c
   136  enum i40iw_status_code ret_code = 0;  in add_sd_direct() local
   145  ret_code = i40iw_add_sd_table_entry(dev->hw, hmc_info,  in add_sd_direct()
   149  if (ret_code)  in add_sd_direct()
   150  return ret_code;  in add_sd_direct()
   332  enum i40iw_status_code ret_code = 0;  in add_pble_pool() local
   371  ret_code = add_sd_direct(dev, pble_rsrc, &info);  in add_pble_pool()
   372  if (ret_code)  in add_pble_pool()
   378  ret_code = add_bp_pages(dev, pble_rsrc, &info);  in add_pble_pool()
   379  if (ret_code)  in add_pble_pool()
   388  ret_code = I40IW_ERR_NO_MEMORY;  in add_pble_pool()
   [all …]
i40iw_virtchnl.h
   101  s16 ret_code;  member
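Unlike every other hit in this section, the ret_code at i40iw_virtchnl.h:101 is not a local status variable but a wire field: a signed 16-bit status written into the reply message that travels back over the PF<->VF channel. A minimal sketch of that shape; the struct name and surrounding fields are invented for illustration, not the real i40iw_virtchnl layout:

#include <stdint.h>

/* Hypothetical PF<->VF reply header; names and layout are illustrative. */
struct vchnl_msg_hdr {
        uint16_t iw_op;    /* which virtual-channel operation this answers */
        int16_t  ret_code; /* signed status carried inside the message */
        uint16_t buf_len;  /* length of the payload that follows */
};

/* VF side: fold the in-message status back into a local error code. */
static int vchnl_resp_status(const struct vchnl_msg_hdr *hdr)
{
        return hdr->ret_code < 0 ? hdr->ret_code : 0;
}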
i40iw_utils.c
  1336  enum i40iw_status_code ret_code = 0;  in i40iw_ieq_check_mpacrc() local
  1344  ret_code = I40IW_ERR_MPA_CRC;  in i40iw_ieq_check_mpacrc()
  1346  return ret_code;  in i40iw_ieq_check_mpacrc()
i40iw_puda.c
   209  enum i40iw_status_code ret_code = 0;  in i40iw_puda_get_next_send_wqe() local
   214  I40IW_RING_MOVE_HEAD(qp->sq_ring, ret_code);  in i40iw_puda_get_next_send_wqe()
   215  if (ret_code)  in i40iw_puda_get_next_send_wqe()
i40iw_main.c
  1931  int ret_code = I40IW_NOT_SUPPORTED;  in i40iw_virtchnl_receive() local
  1944  ret_code = dev->vchnl_if.vchnl_recv(dev, vf_id, msg, len);  in i40iw_virtchnl_receive()
  1950  return ret_code;  in i40iw_virtchnl_receive()
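Taken together, the i40iw hits show one idiom: ret_code starts pessimistic (I40IW_ERR_NOT_READY, I40IW_NOT_SUPPORTED), each step overwrites it, and the first failure is logged and returned. A self-contained sketch of that pattern, with an invented status enum and chnl_send() standing in for dev->vchnl_if.vchnl_send():

#include <stdio.h>

enum status { STATUS_OK = 0, ERR_NOT_READY = 1, ERR_SEND = 2 };

/* Stand-in for dev->vchnl_if.vchnl_send(); always succeeds here. */
static enum status chnl_send(const void *msg, unsigned int len)
{
        (void)msg; (void)len;
        return STATUS_OK;
}

static enum status send_get_ver_req(int channel_ready,
                                    const void *msg, unsigned int len)
{
        enum status ret_code = ERR_NOT_READY;   /* pessimistic default */

        if (!channel_ready)
                return ret_code;                /* early out with the default */

        ret_code = chnl_send(msg, len);
        if (ret_code)
                fprintf(stderr, "%s: send failed 0x%x\n", __func__, ret_code);
        return ret_code;                        /* 0 only if every step passed */
}

The pessimistic initialization means a forgotten assignment fails loudly instead of reporting false success.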
/drivers/net/ethernet/intel/iavf/
iavf_adminq.c
    37  enum iavf_status ret_code;  in iavf_alloc_adminq_asq_ring() local
    39  ret_code = iavf_allocate_dma_mem(hw, &hw->aq.asq.desc_buf,  in iavf_alloc_adminq_asq_ring()
    44  if (ret_code)  in iavf_alloc_adminq_asq_ring()
    45  return ret_code;  in iavf_alloc_adminq_asq_ring()
    47  ret_code = iavf_allocate_virt_mem(hw, &hw->aq.asq.cmd_buf,  in iavf_alloc_adminq_asq_ring()
    50  if (ret_code) {  in iavf_alloc_adminq_asq_ring()
    52  return ret_code;  in iavf_alloc_adminq_asq_ring()
    55  return ret_code;  in iavf_alloc_adminq_asq_ring()
    64  enum iavf_status ret_code;  in iavf_alloc_adminq_arq_ring() local
    66  ret_code = iavf_allocate_dma_mem(hw, &hw->aq.arq.desc_buf,  in iavf_alloc_adminq_arq_ring()
    [all …]
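The iavf adminq allocator pairs each allocation with an unwind: when the second allocation fails, the first is freed before the error goes up. A compilable sketch of the same shape, with malloc/free standing in for iavf_allocate_dma_mem()/iavf_allocate_virt_mem():

#include <stdlib.h>

struct asq {
        void *desc_buf; /* stands in for the DMA descriptor ring */
        void *cmd_buf;  /* stands in for the command-buffer info array */
};

static int alloc_asq(struct asq *asq, size_t ring_len, size_t cmd_len)
{
        asq->desc_buf = malloc(ring_len);
        if (!asq->desc_buf)
                return -1;              /* step one failed: nothing to undo */

        asq->cmd_buf = malloc(cmd_len);
        if (!asq->cmd_buf) {
                free(asq->desc_buf);    /* undo step one before bailing */
                asq->desc_buf = NULL;
                return -1;
        }
        return 0;
}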
/drivers/net/ethernet/intel/i40e/
i40e_lan_hmc.c
    82  i40e_status ret_code = 0;  in i40e_init_lan_hmc() local
    90  ret_code = i40e_allocate_virt_mem(hw, &hw->hmc.hmc_obj_virt_mem,  in i40e_init_lan_hmc()
    92  if (ret_code)  in i40e_init_lan_hmc()
   114  ret_code = I40E_ERR_INVALID_HMC_OBJ_COUNT;  in i40e_init_lan_hmc()
   116  txq_num, obj->max_cnt, ret_code);  in i40e_init_lan_hmc()
   137  ret_code = I40E_ERR_INVALID_HMC_OBJ_COUNT;  in i40e_init_lan_hmc()
   139  rxq_num, obj->max_cnt, ret_code);  in i40e_init_lan_hmc()
   160  ret_code = I40E_ERR_INVALID_HMC_OBJ_COUNT;  in i40e_init_lan_hmc()
   162  fcoe_cntx_num, obj->max_cnt, ret_code);  in i40e_init_lan_hmc()
   183  ret_code = I40E_ERR_INVALID_HMC_OBJ_COUNT;  in i40e_init_lan_hmc()
   [all …]
i40e_nvm.c
    19  i40e_status ret_code = 0;  in i40e_init_nvm() local
    40  ret_code = I40E_ERR_NVM_BLANK_MODE;  in i40e_init_nvm()
    44  return ret_code;  in i40e_init_nvm()
    58  i40e_status ret_code = 0;  in i40e_acquire_nvm() local
    65  ret_code = i40e_aq_request_resource(hw, I40E_NVM_RESOURCE_ID, access,  in i40e_acquire_nvm()
    73  if (ret_code)  in i40e_acquire_nvm()
    76  access, time_left, ret_code, hw->aq.asq_last_status);  in i40e_acquire_nvm()
    78  if (ret_code && time_left) {  in i40e_acquire_nvm()
    84  ret_code = i40e_aq_request_resource(hw,  in i40e_acquire_nvm()
    88  if (!ret_code) {  in i40e_acquire_nvm()
    [all …]
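i40e_acquire_nvm() retries the firmware resource request when it fails but the firmware still reports time left on the resource. A rough sketch of that retry shape; request_resource() and its time_left contract are stand-ins for the real admin-queue call, and the real driver computes an actual timeout rather than retrying once:

/* Stand-in firmware call: 0 on success; on failure, *time_left > 0 means
 * the resource is busy rather than gone, so a retry is worthwhile. */
static int request_resource(unsigned int *time_left)
{
        *time_left = 0;
        return 0;
}

static int acquire_nvm(void)
{
        unsigned int time_left;
        int ret = request_resource(&time_left);

        if (ret && time_left)                   /* busy, not timed out */
                ret = request_resource(&time_left);
        return ret;
}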
i40e_hmc.c
    30  i40e_status ret_code = I40E_SUCCESS;  in i40e_add_sd_table_entry() local
    34  ret_code = I40E_ERR_BAD_PTR;  in i40e_add_sd_table_entry()
    40  ret_code = I40E_ERR_INVALID_SD_INDEX;  in i40e_add_sd_table_entry()
    56  ret_code = i40e_allocate_dma_mem(hw, &mem, mem_type, alloc_len,  in i40e_add_sd_table_entry()
    58  if (ret_code)  in i40e_add_sd_table_entry()
    62  ret_code = i40e_allocate_virt_mem(hw,  in i40e_add_sd_table_entry()
    65  if (ret_code)  in i40e_add_sd_table_entry()
    85  if (ret_code)  in i40e_add_sd_table_entry()
    89  return ret_code;  in i40e_add_sd_table_entry()
   114  i40e_status ret_code = 0;  in i40e_add_pd_table_entry() local
   [all …]
i40e_adminq.c
    52  i40e_status ret_code;  in i40e_alloc_adminq_asq_ring() local
    54  ret_code = i40e_allocate_dma_mem(hw, &hw->aq.asq.desc_buf,  in i40e_alloc_adminq_asq_ring()
    59  if (ret_code)  in i40e_alloc_adminq_asq_ring()
    60  return ret_code;  in i40e_alloc_adminq_asq_ring()
    62  ret_code = i40e_allocate_virt_mem(hw, &hw->aq.asq.cmd_buf,  in i40e_alloc_adminq_asq_ring()
    65  if (ret_code) {  in i40e_alloc_adminq_asq_ring()
    67  return ret_code;  in i40e_alloc_adminq_asq_ring()
    70  return ret_code;  in i40e_alloc_adminq_asq_ring()
    79  i40e_status ret_code;  in i40e_alloc_adminq_arq_ring() local
    81  ret_code = i40e_allocate_dma_mem(hw, &hw->aq.arq.desc_buf,  in i40e_alloc_adminq_arq_ring()
    [all …]
i40e_diag.c
    79  i40e_status ret_code = 0;  in i40e_diag_reg_test() local
    85  !ret_code; i++) {  in i40e_diag_reg_test()
   102  for (j = 0; j < elements && !ret_code; j++) {  in i40e_diag_reg_test()
   105  ret_code = i40e_diag_reg_pattern_test(hw, reg, mask);  in i40e_diag_reg_test()
   109  return ret_code;  in i40e_diag_reg_test()
   120  i40e_status ret_code;  in i40e_diag_eeprom_test() local
   124  ret_code = i40e_read_nvm_word(hw, I40E_SR_NVM_CONTROL_WORD, &reg_val);  in i40e_diag_eeprom_test()
   125  if (!ret_code &&  in i40e_diag_eeprom_test()
/drivers/s390/cio/
vfio_ccw_fsm.c
   187  private->io_region->ret_code = -EIO;  in fsm_io_error()
   193  private->io_region->ret_code = -EBUSY;  in fsm_io_busy()
   199  private->io_region->ret_code = -EAGAIN;  in fsm_io_retry()
   211  cmd_region->ret_code = -EIO;  in fsm_async_error()
   217  private->cmd_region->ret_code = -EAGAIN;  in fsm_async_retry()
   257  io_region->ret_code = -EOPNOTSUPP;  in fsm_io_request()
   265  io_region->ret_code = cp_init(&private->cp, mdev_dev(mdev),  in fsm_io_request()
   267  if (io_region->ret_code) {  in fsm_io_request()
   272  io_region->ret_code);  in fsm_io_request()
   277  io_region->ret_code = cp_prefetch(&private->cp);  in fsm_io_request()
   [all …]
vfio_ccw_async.c
    60  ret = region->ret_code ? region->ret_code : count;  in vfio_ccw_async_region_write()
vfio_ccw_ops.c
   279  ret = (region->ret_code != 0) ? region->ret_code : count;  in vfio_ccw_mdev_write_io_region()
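Both vfio_ccw write handlers end identically: the FSM (previous entry) parks a negative errno in the region's ret_code, and the write path returns that errno if set, otherwise the full byte count. A sketch of the convention with the vfio plumbing elided and a hypothetical region type:

#include <errno.h>
#include <stddef.h>
#include <sys/types.h>

struct io_region {
        int ret_code;   /* negative errno filled in by the FSM, 0 on success */
};

/* FSM error leg: record why the request failed for the writer to pick up. */
static void fsm_error(struct io_region *region)
{
        region->ret_code = -EIO;
}

/* Write handler: a stored error wins, otherwise report full completion. */
static ssize_t region_write(struct io_region *region, size_t count)
{
        return region->ret_code ? region->ret_code : (ssize_t)count;
}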
/drivers/net/ethernet/intel/ice/
ice_controlq.c
   353  enum ice_status ret_code;  in ice_init_sq() local
   357  ret_code = ICE_ERR_NOT_READY;  in ice_init_sq()
   363  ret_code = ICE_ERR_CFG;  in ice_init_sq()
   371  ret_code = ice_alloc_ctrlq_sq_ring(hw, cq);  in ice_init_sq()
   372  if (ret_code)  in ice_init_sq()
   376  ret_code = ice_alloc_sq_bufs(hw, cq);  in ice_init_sq()
   377  if (ret_code)  in ice_init_sq()
   381  ret_code = ice_cfg_sq_regs(hw, cq);  in ice_init_sq()
   382  if (ret_code)  in ice_init_sq()
   394  return ret_code;  in ice_init_sq()
   [all …]
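ice_init_sq() is staged initialization: each step either advances or bails out, presetting ret_code (ICE_ERR_NOT_READY, ICE_ERR_CFG) or taking it from the failing helper. The jump targets are truncated in the hits above, so the goto-unwind below is an assumed reconstruction of the usual kernel shape, not the literal function:

#include <stdlib.h>

struct ctrlq { void *ring; void *bufs; int num_entries; };

static int init_sq(struct ctrlq *cq, size_t ring_len, size_t buf_len)
{
        int ret_code;

        if (!cq->num_entries)
                return -1;              /* bad config, ICE_ERR_CFG analogue */

        cq->ring = malloc(ring_len);    /* step 1: descriptor ring */
        if (!cq->ring)
                return -1;

        cq->bufs = malloc(buf_len);     /* step 2: command buffers */
        if (!cq->bufs) {
                ret_code = -1;
                goto free_ring;         /* unwind step 1 only */
        }
        return 0;

free_ring:
        free(cq->ring);
        cq->ring = NULL;
        return ret_code;
}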
/drivers/mtd/
mtdcore.c
  1405  int ret_code;  in mtd_read_oob() local
  1409  ret_code = mtd_check_oob_ops(mtd, from, ops);  in mtd_read_oob()
  1410  if (ret_code)  in mtd_read_oob()
  1411  return ret_code;  in mtd_read_oob()
  1420  ret_code = mtd_io_emulated_slc(mtd, from, true, ops);  in mtd_read_oob()
  1422  ret_code = mtd_read_oob_std(mtd, from, ops);  in mtd_read_oob()
  1432  if (unlikely(ret_code < 0))  in mtd_read_oob()
  1433  return ret_code;  in mtd_read_oob()
  1436  return ret_code >= mtd->bitflip_threshold ? -EUCLEAN : 0;  in mtd_read_oob()
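mtd_read_oob() is the one place here where ret_code is not purely pass/fail: a negative value is an error, while a non-negative value is the maximum number of corrected bitflips, folded into -EUCLEAN once it reaches mtd->bitflip_threshold. A sketch of that tri-state convention (EUCLEAN is Linux-specific; the threshold value below is arbitrary):

#include <errno.h>

#define BITFLIP_THRESHOLD 4     /* stands in for mtd->bitflip_threshold */

/* Stand-in low-level read: < 0 is an error, else max bitflips corrected. */
static int read_oob_raw(void) { return 2; }

static int read_oob(void)
{
        int ret_code = read_oob_raw();

        if (ret_code < 0)
                return ret_code;        /* hard error: pass straight through */

        /* Data recovered; flag -EUCLEAN when ECC is working too hard. */
        return ret_code >= BITFLIP_THRESHOLD ? -EUCLEAN : 0;
}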
/drivers/gpu/drm/amd/amdgpu/
vcn_v1_0.c
  1213  int ret_code;  in vcn_v1_0_pause_dpg_mode() local
  1229  ret_code = 0;  in vcn_v1_0_pause_dpg_mode()
  1232  ret_code = SOC15_WAIT_ON_RREG(UVD, 0, mmUVD_POWER_STATUS,  in vcn_v1_0_pause_dpg_mode()
  1236  if (!ret_code) {  in vcn_v1_0_pause_dpg_mode()
  1285  ret_code = 0;  in vcn_v1_0_pause_dpg_mode()
  1288  ret_code = SOC15_WAIT_ON_RREG(UVD, 0, mmUVD_POWER_STATUS,  in vcn_v1_0_pause_dpg_mode()
  1292  if (!ret_code) {  in vcn_v1_0_pause_dpg_mode()
vcn_v2_5.c
  1405  int ret_code = 0;  in vcn_v2_5_pause_dpg_mode() local
  1415  ret_code = SOC15_WAIT_ON_RREG(VCN, inst_idx, mmUVD_POWER_STATUS, 0x1,  in vcn_v2_5_pause_dpg_mode()
  1418  if (!ret_code) {  in vcn_v2_5_pause_dpg_mode()
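In both VCN variants ret_code only gates progress: SOC15_WAIT_ON_RREG() polls a register until it shows the expected power state and leaves ret_code at 0 on success. A generic poll-until-match sketch; the real macro also delays between reads and takes register offsets, which is elided here:

/* Poll until (*reg & mask) == expect or the budget runs out; returns 0 on
 * match, -1 on timeout -- the contract SOC15_WAIT_ON_RREG appears to have. */
static int wait_on_reg(const volatile unsigned int *reg, unsigned int mask,
                       unsigned int expect, int tries)
{
        while (tries-- > 0) {
                if ((*reg & mask) == expect)
                        return 0;
        }
        return -1;
}

static int pause_dpg(const volatile unsigned int *power_status)
{
        int ret_code = wait_on_reg(power_status, 0x3, 0x1, 1000);

        if (!ret_code) {
                /* expected power state reached: safe to reprogram the block */
        }
        return ret_code;
}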
/drivers/scsi/bnx2i/
bnx2i_iscsi.c
  1416  int ret_code;  in bnx2i_conn_bind() local
  1426  ret_code = -EIO;  in bnx2i_conn_bind()
  1434  ret_code = -EINVAL;  in bnx2i_conn_bind()
  1439  ret_code = -EINVAL;  in bnx2i_conn_bind()
  1452  ret_code = -EEXIST;  in bnx2i_conn_bind()
  1460  ret_code = bnx2i_bind_conn_to_iscsi_cid(hba, bnx2i_conn,  in bnx2i_conn_bind()
  1472  return ret_code;  in bnx2i_conn_bind()
/drivers/block/drbd/
drbd_nl.c
   188  adm_ctx->reply_dh->ret_code = NO_ERROR;  in drbd_adm_prepare()
   322  adm_ctx->reply_dh->ret_code = retcode;  in drbd_adm_finish()
  3333  dh->ret_code = NO_ERROR;  in drbd_adm_dump_resources()
  3455  dh->ret_code = retcode;  in drbd_adm_dump_devices()
  3581  dh->ret_code = retcode;  in drbd_adm_dump_connections()
  3723  dh->ret_code = retcode;  in drbd_adm_dump_peer_devices()
  3991  dh->ret_code = NO_ERROR;  in get_one_status()
  4009  dh->ret_code = NO_ERROR;  in get_one_status()
  4590  d_out->ret_code = NO_ERROR;  in drbd_bcast_event()
  4645  dh->ret_code = NO_ERROR;  in notify_resource_state()
   [all …]
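In drbd, ret_code is a field of the generic-netlink reply header: every dump, status, and event path stamps dh->ret_code into the message before sending it to userspace, with NO_ERROR as the positive success value. A structural sketch; the header layout is invented and NO_ERROR's numeric value here is illustrative:

#include <stdint.h>

#define NO_ERROR 101    /* success is a positive status, not 0 */

/* Hypothetical reply header echoed back to the userspace admin tool. */
struct reply_hdr {
        uint32_t minor;     /* which device/resource the answer concerns */
        int32_t  ret_code;  /* NO_ERROR or a driver-specific status code */
};

static void finish_reply(struct reply_hdr *dh, int32_t retcode)
{
        dh->ret_code = retcode;    /* stamped just before the message is sent */
}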
/drivers/net/ethernet/ti/
davinci_emac.c
   948  int ret_code;  in emac_dev_xmit() local
   958  ret_code = skb_padto(skb, EMAC_DEF_MIN_ETHPKTSIZE);  in emac_dev_xmit()
   959  if (unlikely(ret_code < 0)) {  in emac_dev_xmit()
   967  ret_code = cpdma_chan_submit(priv->txchan, skb, skb->data, skb->len,  in emac_dev_xmit()
   969  if (unlikely(ret_code != 0)) {  in emac_dev_xmit()
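The davinci_emac hit is the usual ndo_start_xmit shape: pad the frame to the hardware minimum, submit it to the DMA channel, and treat any nonzero result as a failed transmit. A simplified sketch with stand-ins for skb_padto() and cpdma_chan_submit(); the real handler also frees the skb and returns NETDEV_TX_OK on error, which is elided:

#include <stddef.h>

#define MIN_ETH_PKT 60  /* stands in for EMAC_DEF_MIN_ETHPKTSIZE */

/* Stand-ins for skb_padto() and cpdma_chan_submit(). */
static int pad_to(size_t *len, size_t min)
{
        if (*len < min)
                *len = min;
        return 0;
}

static int dma_submit(const void *data, size_t len)
{
        (void)data; (void)len;
        return 0;
}

static int dev_xmit(const void *frame, size_t len)
{
        int ret_code = pad_to(&len, MIN_ETH_PKT);

        if (ret_code < 0)
                return ret_code;        /* padding failed: drop the frame */

        ret_code = dma_submit(frame, len);
        if (ret_code != 0)
                return ret_code;        /* channel refused it: report failure */
        return 0;                       /* accepted for transmit */
}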