/drivers/target/
target_core_file.c
  265  struct se_device *dev = cmd->se_dev;  in fd_execute_rw_aio()
  394  struct se_device *dev = cmd->se_dev;  in fd_execute_sync_cache()
  439  struct se_device *se_dev = cmd->se_dev;  in fd_execute_write_same()  local
  440  struct fd_dev *fd_dev = FD_DEV(se_dev);  in fd_execute_write_same()
  441  loff_t pos = cmd->t_task_lba * se_dev->dev_attrib.block_size;  in fd_execute_write_same()
  459  cmd->t_data_sg[0].length != cmd->se_dev->dev_attrib.block_size) {  in fd_execute_write_same()
  464  cmd->se_dev->dev_attrib.block_size);  in fd_execute_write_same()
  477  len += se_dev->dev_attrib.block_size;  in fd_execute_write_same()
  494  fd_do_prot_fill(struct se_device *se_dev, sector_t lba, sector_t nolb,  in fd_do_prot_fill()  argument
  497  struct fd_dev *fd_dev = FD_DEV(se_dev);  in fd_do_prot_fill()
  [all …]

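The hits at lines 441, 459, 464 and 477 show the file backend's recurring arithmetic: the command's starting logical block (t_task_lba) is scaled by the device's configured block size to obtain a byte position in the backing file. Below is a minimal userspace sketch of that conversion; the *_stub types and cmd_byte_offset() helper are illustrative stand-ins, not the kernel structures.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical, trimmed-down stand-ins for the kernel structures. */
struct dev_attrib_stub { uint32_t block_size; };
struct se_device_stub { struct dev_attrib_stub dev_attrib; };
struct se_cmd_stub {
	struct se_device_stub *se_dev;
	uint64_t t_task_lba;   /* starting logical block address */
	uint32_t data_length;  /* transfer length in bytes */
};

/* Byte offset of the command's first block in the backing file, mirroring
 * "pos = cmd->t_task_lba * se_dev->dev_attrib.block_size" above. */
static uint64_t cmd_byte_offset(const struct se_cmd_stub *cmd)
{
	return cmd->t_task_lba * (uint64_t)cmd->se_dev->dev_attrib.block_size;
}

int main(void)
{
	struct se_device_stub dev = { .dev_attrib = { .block_size = 512 } };
	struct se_cmd_stub cmd = { .se_dev = &dev, .t_task_lba = 2048, .data_length = 4096 };

	printf("offset=%llu bytes\n", (unsigned long long)cmd_byte_offset(&cmd));
	return 0;
}

With a 512-byte block size, LBA 2048 lands at byte offset 1048576 (1 MiB).
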
target_core_rd.c
  381  struct se_device *se_dev = cmd->se_dev;  in rd_do_prot_rw()  local
  382  struct rd_dev *dev = RD_DEV(se_dev);  in rd_do_prot_rw()
  385  u32 sectors = cmd->data_length / se_dev->dev_attrib.block_size;  in rd_do_prot_rw()
  391  tmp = cmd->t_task_lba * se_dev->prot_length;  in rd_do_prot_rw()
  402  if (se_dev->dev_attrib.pi_prot_verify) {  in rd_do_prot_rw()
  420  struct se_device *se_dev = cmd->se_dev;  in rd_execute_rw()  local
  421  struct rd_dev *dev = RD_DEV(se_dev);  in rd_execute_rw()
  437  tmp = cmd->t_task_lba * se_dev->dev_attrib.block_size;  in rd_execute_rw()
  453  if (cmd->prot_type && se_dev->dev_attrib.pi_prot_type &&  in rd_execute_rw()
  521  if (cmd->prot_type && se_dev->dev_attrib.pi_prot_type &&  in rd_execute_rw()

target_core_xcopy.c
   56  static int target_xcopy_locate_se_dev_e4_iter(struct se_device *se_dev,  in target_xcopy_locate_se_dev_e4_iter()  argument
   62  if (!se_dev->dev_attrib.emulate_3pc) {  in target_xcopy_locate_se_dev_e4_iter()
   63  pr_debug("XCOPY: emulate_3pc disabled on se_dev %p\n", se_dev);  in target_xcopy_locate_se_dev_e4_iter()
   68  target_xcopy_gen_naa_ieee(se_dev, &tmp_dev_wwn[0]);  in target_xcopy_locate_se_dev_e4_iter()
   76  pr_debug("XCOPY 0xe4: located se_dev: %p\n", se_dev);  in target_xcopy_locate_se_dev_e4_iter()
  188  xop->src_dev = se_cmd->se_dev;  in target_xcopy_parse_tiddesc_e4()
  205  xop->dst_dev = se_cmd->se_dev;  in target_xcopy_parse_tiddesc_e4()
  218  struct se_device *local_dev = se_cmd->se_dev;  in target_xcopy_parse_target_descriptors()
  538  struct se_device *se_dev,  in target_xcopy_setup_pt_cmd()  argument
  549  cmd->se_lun = &se_dev->xcopy_lun;  in target_xcopy_setup_pt_cmd()
  [all …]

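Lines 56–76 show how an EXTENDED COPY 0xE4 target descriptor is resolved: an iterator visits each se_device, devices without emulate_3pc are skipped, a NAA IEEE WWN is generated for the candidate, and the first match against the descriptor's WWN is recorded. A simplified sketch of that match loop follows; the stub type, the NAA_WWN_LEN value and locate_by_wwn() are hypothetical stand-ins for target_xcopy_locate_se_dev_e4_iter() and its helpers.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define NAA_WWN_LEN 16   /* designator length compared in this example */

/* Hypothetical stub: the real code derives the WWN from the device's T10 data. */
struct xcopy_dev_stub {
	int emulate_3pc;                 /* third-party copy support advertised? */
	uint8_t naa_wwn[NAA_WWN_LEN];
};

/* Return the first device whose NAA designator matches the descriptor,
 * skipping devices that do not emulate third-party copy (3PC). */
static struct xcopy_dev_stub *locate_by_wwn(struct xcopy_dev_stub *devs, int nr,
					    const uint8_t *dev_wwn)
{
	for (int i = 0; i < nr; i++) {
		if (!devs[i].emulate_3pc)
			continue;
		if (!memcmp(devs[i].naa_wwn, dev_wwn, NAA_WWN_LEN))
			return &devs[i];
	}
	return NULL;
}

int main(void)
{
	struct xcopy_dev_stub devs[2] = {
		{ .emulate_3pc = 0, .naa_wwn = "AAAABBBBCCCCDDD" },
		{ .emulate_3pc = 1, .naa_wwn = "AAAABBBBCCCCDDD" },
	};

	/* The first device has the same WWN but is skipped: 3PC is disabled. */
	printf("match: %s\n", locate_by_wwn(devs, 2, devs[1].naa_wwn) == &devs[1] ? "yes" : "no");
	return 0;
}
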
target_core_spc.c
   54  struct se_device *dev = cmd->se_dev;  in spc_emulate_inquiry_std()
  121  struct se_device *dev = cmd->se_dev;  in spc_emulate_evpd_80()
  170  struct se_device *dev = cmd->se_dev;  in spc_emulate_evpd_83()
  440  struct se_device *dev = cmd->se_dev;  in spc_emulate_evpd_86()
  471  spin_lock(&cmd->se_dev->t10_alua.lba_map_lock);  in spc_emulate_evpd_86()
  474  spin_unlock(&cmd->se_dev->t10_alua.lba_map_lock);  in spc_emulate_evpd_86()
  482  struct se_device *dev = cmd->se_dev;  in spc_emulate_evpd_b0()
  576  struct se_device *dev = cmd->se_dev;  in spc_emulate_evpd_b1()
  589  struct se_device *dev = cmd->se_dev;  in spc_emulate_evpd_b2()
  653  struct se_device *dev = cmd->se_dev;  in spc_emulate_evpd_b3()
  [all …]

target_core_pr.c
    89  struct se_device *dev = cmd->se_dev;  in target_scsi2_reservation_check()
   122  struct se_device *dev = cmd->se_dev;  in target_check_scsi2_reservation_conflict()
   127  pr_reg = core_scsi3_locate_pr_reg(cmd->se_dev, se_sess->se_node_acl,  in target_check_scsi2_reservation_conflict()
   203  struct se_device *dev = cmd->se_dev;  in target_scsi2_reservation_release()
   244  struct se_device *dev = cmd->se_dev;  in target_scsi2_reservation_reserve()
   565  struct se_device *dev = cmd->se_dev;  in target_scsi3_pr_reservation_check()
  1484  struct se_device *dev = cmd->se_dev;  in core_scsi3_decode_spec_i_port()
  1514  local_pr_reg = __core_scsi3_alloc_registration(cmd->se_dev,  in core_scsi3_decode_spec_i_port()
  1763  dest_pr_reg = __core_scsi3_alloc_registration(cmd->se_dev,  in core_scsi3_decode_spec_i_port()
  1811  __core_scsi3_add_registration(cmd->se_dev, dest_node_acl,  in core_scsi3_decode_spec_i_port()
  [all …]

target_core_iblock.c
  310  struct iblock_dev *ib_dev = IBLOCK_DEV(cmd->se_dev);  in iblock_get_bio()
  368  struct iblock_dev *ib_dev = IBLOCK_DEV(cmd->se_dev);  in iblock_execute_sync_cache()
  392  struct block_device *bdev = IBLOCK_DEV(cmd->se_dev)->ibd_bd;  in iblock_execute_unmap()
  393  struct se_device *dev = cmd->se_dev;  in iblock_execute_unmap()
  411  struct se_device *dev = cmd->se_dev;  in iblock_execute_zero_out()
  444  struct block_device *bdev = IBLOCK_DEV(cmd->se_dev)->ibd_bd;  in iblock_execute_write_same()
  449  struct se_device *dev = cmd->se_dev;  in iblock_execute_write_same()
  462  sg->length != cmd->se_dev->dev_attrib.block_size) {  in iblock_execute_write_same()
  465  cmd->se_dev->dev_attrib.block_size);  in iblock_execute_write_same()
  625  struct se_device *dev = cmd->se_dev;  in iblock_alloc_bip()
  [all …]

target_core_sbc.c
   34  struct se_device *dev = cmd->se_dev;  in sbc_emulate_readcapacity()
   77  struct se_device *dev = cmd->se_dev;  in sbc_emulate_readcapacity_16()
  187  return cmd->se_dev->transport->get_blocks(cmd->se_dev) -  in sbc_get_write_same_sectors()
  218  return cmd->se_dev->dev_attrib.block_size * sectors;  in sbc_get_size()
  284  struct se_device *dev = cmd->se_dev;  in sbc_setup_write_same()
  295  if (sectors > cmd->se_dev->dev_attrib.max_write_same_len) {  in sbc_setup_write_same()
  297  sectors, cmd->se_dev->dev_attrib.max_write_same_len);  in sbc_setup_write_same()
  416  struct se_device *dev = cmd->se_dev;  in compare_and_write_post()
  440  struct se_device *dev = cmd->se_dev;  in compare_and_write_callback()
  593  struct se_device *dev = cmd->se_dev;  in sbc_compare_and_write()
  [all …]

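The hit at line 187 reflects the SBC WRITE SAME rule that a NUMBER OF LOGICAL BLOCKS of zero means "from the starting LBA through the last block of the medium", computed as get_blocks() - t_task_lba + 1. A hedged sketch of that calculation, assuming last_lba is the highest addressable LBA as reported by READ CAPACITY; write_same_sectors() is an illustrative stand-in:

#include <stdint.h>
#include <stdio.h>

/* num_blocks == 0 requests a write that extends to the end of the medium. */
static uint64_t write_same_sectors(uint64_t lba, uint32_t num_blocks, uint64_t last_lba)
{
	if (num_blocks)
		return num_blocks;
	return last_lba - lba + 1;   /* from lba through the final block */
}

int main(void)
{
	/* 1 GiB device with 512-byte blocks: LBAs 0..2097151 */
	uint64_t last_lba = 2097151;

	printf("%llu\n", (unsigned long long)write_same_sectors(2097000, 0, last_lba)); /* 152 */
	return 0;
}
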
target_core_transport.c
   647  struct se_device *dev = cmd->se_dev;  in target_remove_from_state_list()
   731  struct se_device *dev = cmd->se_dev;  in transport_get_sense_buffer()
   782  transport_handle_queue_full(cmd, cmd->se_dev,  in target_handle_abort()
   901  struct se_device *dev = cmd->se_dev;  in target_add_to_state_list()
  1305  struct se_device *dev = cmd->se_dev;  in target_cmd_size_check()
  1408  struct se_device *dev = cmd->se_dev;  in transport_check_alloc_task_attr()
  1482  struct se_device *dev = cmd->se_dev;  in target_cmd_parse_cdb()
  1942  cmd->se_dev->dev_attrib.emulate_ua_intlck_ctrl  in transport_generic_request_failure()
  1972  transport_handle_queue_full(cmd, cmd->se_dev, ret, false);  in transport_generic_request_failure()
  2034  sectors = cmd->data_length >> ilog2(cmd->se_dev->dev_attrib.block_size);  in target_write_prot_action()
  [all …]

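Line 2034 converts a byte count into a sector count with a shift, data_length >> ilog2(block_size), which works because the block size is always a power of two. A small userspace illustration; ilog2_pow2() is a stand-in for the kernel's ilog2(), not its implementation:

#include <stdint.h>
#include <stdio.h>

/* Position of the single set bit in a power-of-two value
 * (e.g. 512 -> 9, 4096 -> 12). */
static unsigned int ilog2_pow2(uint32_t v)
{
	unsigned int shift = 0;

	while (v >>= 1)
		shift++;
	return shift;
}

int main(void)
{
	uint32_t block_size = 4096;       /* must be a power of two */
	uint32_t data_length = 32768;     /* bytes to transfer */

	/* Equivalent to data_length / block_size, as in the hit above. */
	uint32_t sectors = data_length >> ilog2_pow2(block_size);

	printf("sectors=%u\n", sectors);  /* prints 8 */
	return 0;
}
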
target_core_user.c
   113  struct se_device se_dev;  member
   166  #define TCMU_DEV(_se_dev) container_of(_se_dev, struct tcmu_dev, se_dev)
   382  if (nl_cmd->udev->se_dev.dev_index == dev_id) {  in tcmu_genl_cmd_done()
   625  struct se_device *se_dev = se_cmd->se_dev;  in tcmu_alloc_cmd()  local
   626  struct tcmu_dev *udev = TCMU_DEV(se_dev);  in tcmu_alloc_cmd()
  1178  struct se_device *se_dev = se_cmd->se_dev;  in tcmu_queue_cmd()  local
  1179  struct tcmu_dev *udev = TCMU_DEV(se_dev);  in tcmu_queue_cmd()
  1228  tcmu_tmr_notify(struct se_device *se_dev, enum tcm_tmreq_table tmf,  in tcmu_tmr_notify()  argument
  1237  struct tcmu_dev *udev = TCMU_DEV(se_dev);  in tcmu_tmr_notify()
  1580  return &udev->se_dev;  in tcmu_alloc_device()
  [all …]

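Lines 113 and 166 show the idiom every backend in this listing uses in some form: struct se_device is embedded in the backend's private device, and container_of() recovers the outer structure from the se_dev pointer the core hands back (FD_DEV(), RD_DEV(), IBLOCK_DEV() and PSCSI_DEV() elsewhere here do the same). A self-contained userspace sketch of the idiom; the *_stub types and TCMU_DEV_STUB() are illustrative, not the real definitions:

#include <stddef.h>
#include <stdio.h>

/* Userspace approximation of the kernel's container_of(): recover the
 * address of the enclosing structure from a pointer to one of its members. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct se_device_stub { int dev_index; };

struct tcmu_dev_stub {
	int qfull_time_out;              /* hypothetical private field */
	struct se_device_stub se_dev;    /* embedded generic device */
};

#define TCMU_DEV_STUB(_se_dev) container_of(_se_dev, struct tcmu_dev_stub, se_dev)

int main(void)
{
	struct tcmu_dev_stub udev = { .qfull_time_out = 30, .se_dev = { .dev_index = 7 } };
	struct se_device_stub *se_dev = &udev.se_dev;   /* what the core passes around */

	struct tcmu_dev_stub *back = TCMU_DEV_STUB(se_dev);
	printf("qfull_time_out=%d dev_index=%d\n",
	       back->qfull_time_out, back->se_dev.dev_index);
	return 0;
}
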
target_core_device.c
   132  se_cmd->se_dev = rcu_dereference_raw(se_lun->lun_se_dev);  in transport_lookup_cmd_lun()
   133  atomic_long_inc(&se_cmd->se_dev->num_cmds);  in transport_lookup_cmd_lun()
   137  &se_cmd->se_dev->write_bytes);  in transport_lookup_cmd_lun()
   140  &se_cmd->se_dev->read_bytes);  in transport_lookup_cmd_lun()
   180  se_cmd->se_dev = rcu_dereference_raw(se_lun->lun_se_dev);  in transport_lookup_tmr_lun()
  1080  struct se_device *dev = cmd->se_dev;  in passthrough_parse_cdb()

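Lines 132–140 show per-device accounting at LUN lookup time: a command counter is bumped and the transfer size is added to either a write-byte or read-byte counter depending on the data direction. A compact analogue using C11 atomics in place of the kernel's atomic_long_t; the stub struct and account_cmd() are stand-ins:

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

enum dma_dir { DMA_TO_DEVICE, DMA_FROM_DEVICE, DMA_NONE };

struct dev_stats {
	atomic_ulong num_cmds;
	atomic_ulong write_bytes;
	atomic_ulong read_bytes;
};

/* Account one command against the device it was routed to. */
static void account_cmd(struct dev_stats *st, enum dma_dir dir, uint32_t data_length)
{
	atomic_fetch_add(&st->num_cmds, 1);
	if (dir == DMA_TO_DEVICE)          /* initiator -> device: a write */
		atomic_fetch_add(&st->write_bytes, data_length);
	else if (dir == DMA_FROM_DEVICE)   /* device -> initiator: a read */
		atomic_fetch_add(&st->read_bytes, data_length);
}

int main(void)
{
	static struct dev_stats st;   /* zero-initialized */

	account_cmd(&st, DMA_FROM_DEVICE, 4096);
	account_cmd(&st, DMA_TO_DEVICE, 8192);
	printf("cmds=%lu read=%lu write=%lu\n",
	       atomic_load(&st.num_cmds), atomic_load(&st.read_bytes),
	       atomic_load(&st.write_bytes));
	return 0;
}
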
target_core_alua.c
   58  struct se_device *dev = cmd->se_dev;  in target_emulate_report_referrals()
  138  struct se_device *dev = cmd->se_dev;  in target_emulate_report_target_port_groups()
  270  struct se_device *dev = cmd->se_dev;  in target_emulate_set_target_port_groups()
  465  struct se_device *dev = cmd->se_dev;  in core_alua_state_lba_dependent()
  672  struct se_device *dev = cmd->se_dev;  in target_alua_state_check()

target_core_pscsi.c
  596  struct pscsi_dev_virt *pdv = PSCSI_DEV(cmd->se_dev);  in pscsi_complete_cmd()
  850  struct pscsi_dev_virt *pdv = PSCSI_DEV(cmd->se_dev);  in pscsi_map_sg()
  968  struct pscsi_dev_virt *pdv = PSCSI_DEV(cmd->se_dev);  in pscsi_execute_cmd()

target_core_ua.c
  196  struct se_device *dev = cmd->se_dev;  in core_scsi3_ua_for_check_condition()

/drivers/scsi/qla2xxx/
tcm_qla2xxx.c
  414  cmd->blk_sz = se_cmd->se_dev->dev_attrib.block_size;  in tcm_qla2xxx_write_pending()
  674  cmd->blk_sz = se_cmd->se_dev->dev_attrib.block_size;  in tcm_qla2xxx_queue_data_in()

/drivers/infiniband/ulp/isert/
ib_isert.c
  1545  u32 block_size = se_cmd->se_dev->dev_attrib.block_size + 8;  in isert_check_pi_status()
  1987  domain->sig.dif.pi_interval = se_cmd->se_dev->dev_attrib.block_size;  in isert_set_dif_domain()

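The "+ 8" at line 1545 accounts for T10 protection information: with DIF enabled, every logical block carries an interleaved 8-byte tuple (2-byte guard CRC, 2-byte application tag, 4-byte reference tag), so the per-block interval becomes block_size + 8. A small sketch of the resulting transfer-length arithmetic; wire_length_with_pi() is a hypothetical helper, not part of the driver:

#include <stdint.h>
#include <stdio.h>

#define DIF_TUPLE_SIZE 8   /* guard + application tag + reference tag = 8 bytes */

/* Total bytes moved when every logical block carries an interleaved
 * protection-information tuple, as in the isert snippet above. */
static uint64_t wire_length_with_pi(uint32_t block_size, uint32_t nr_blocks)
{
	return (uint64_t)(block_size + DIF_TUPLE_SIZE) * nr_blocks;
}

int main(void)
{
	printf("%llu\n", (unsigned long long)wire_length_with_pi(512, 8)); /* 4160 */
	return 0;
}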