
Searched refs:qc (Results 1 – 25 of 87) sorted by relevance


/drivers/ata/
libata-sff.c
698 static void ata_pio_sector(struct ata_queued_cmd *qc) in ata_pio_sector() argument
700 int do_write = (qc->tf.flags & ATA_TFLAG_WRITE); in ata_pio_sector()
701 struct ata_port *ap = qc->ap; in ata_pio_sector()
706 if (qc->curbytes == qc->nbytes - qc->sect_size) in ata_pio_sector()
709 page = sg_page(qc->cursg); in ata_pio_sector()
710 offset = qc->cursg->offset + qc->cursg_ofs; in ata_pio_sector()
716 DPRINTK("data %s\n", qc->tf.flags & ATA_TFLAG_WRITE ? "write" : "read"); in ata_pio_sector()
726 ap->ops->sff_data_xfer(qc->dev, buf + offset, qc->sect_size, in ata_pio_sector()
733 ap->ops->sff_data_xfer(qc->dev, buf + offset, qc->sect_size, in ata_pio_sector()
740 qc->curbytes += qc->sect_size; in ata_pio_sector()
[all …]
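
The libata-sff.c hits above are the core PIO data path: ata_pio_sector() pushes one sector of qc->sect_size bytes through the port's sff_data_xfer hook and advances the scatterlist cursor kept in the qc. A condensed, hedged sketch of that pattern follows; pio_sector_sketch is a made-up name, and high-memory mapping, locking and completion handling are omitted.

#include <linux/libata.h>
#include <linux/scatterlist.h>
#include <linux/mm.h>

/* Simplified sketch of the PIO-sector pattern above; not the exact kernel code. */
static void pio_sector_sketch(struct ata_queued_cmd *qc)
{
        int do_write = (qc->tf.flags & ATA_TFLAG_WRITE);
        struct ata_port *ap = qc->ap;
        struct page *page = sg_page(qc->cursg);
        unsigned int offset = qc->cursg->offset + qc->cursg_ofs;
        unsigned char *buf = page_address(page);        /* assumes a lowmem page */

        /* move one sector through the port's PIO transfer hook */
        ap->ops->sff_data_xfer(qc->dev, buf + offset, qc->sect_size, do_write);

        /* advance the bookkeeping; step to the next scatterlist entry when done */
        qc->curbytes += qc->sect_size;
        qc->cursg_ofs += qc->sect_size;
        if (qc->cursg_ofs == qc->cursg->length) {
                qc->cursg = sg_next(qc->cursg);
                qc->cursg_ofs = 0;
        }
}
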
pdc_adma.c
135 static void adma_qc_prep(struct ata_queued_cmd *qc);
136 static unsigned int adma_qc_issue(struct ata_queued_cmd *qc);
137 static int adma_check_atapi_dma(struct ata_queued_cmd *qc);
188 static int adma_check_atapi_dma(struct ata_queued_cmd *qc) in adma_check_atapi_dma() argument
272 static int adma_fill_sg(struct ata_queued_cmd *qc) in adma_fill_sg() argument
275 struct ata_port *ap = qc->ap; in adma_fill_sg()
279 u8 pFLAGS = pORD | ((qc->tf.flags & ATA_TFLAG_WRITE) ? pDIRO : 0); in adma_fill_sg()
282 for_each_sg(qc->sg, sg, qc->n_elem, si) { in adma_fill_sg()
296 buf[i++] = qc->dev->dma_mode & 0xf; in adma_fill_sg()
314 static void adma_qc_prep(struct ata_queued_cmd *qc) in adma_qc_prep() argument
[all …]
sata_dwc_460ex.c
301 #define HSDEV_FROM_QC(qc) ((struct sata_dwc_device *)\ argument
302 (qc)->ap->host->private_data)
330 static void sata_dwc_bmdma_start_by_tag(struct ata_queued_cmd *qc, u8 tag);
331 static int sata_dwc_qc_complete(struct ata_port *ap, struct ata_queued_cmd *qc,
880 struct ata_queued_cmd *qc; in sata_dwc_error_intr() local
912 qc = ata_qc_from_tag(ap, tag); in sata_dwc_error_intr()
913 if (qc) in sata_dwc_error_intr()
914 qc->err_mask |= err_mask; in sata_dwc_error_intr()
933 struct ata_queued_cmd *qc; in sata_dwc_isr() local
970 qc = ata_qc_from_tag(ap, tag); in sata_dwc_isr()
[all …]
sata_sx4.c
209 struct ata_queued_cmd *qc; member
221 static void pdc20621_qc_prep(struct ata_queued_cmd *qc);
237 static unsigned int pdc20621_qc_issue(struct ata_queued_cmd *qc);
240 static void pdc_post_internal_cmd(struct ata_queued_cmd *qc);
241 static int pdc_check_atapi_dma(struct ata_queued_cmd *qc);
453 static void pdc20621_dma_prep(struct ata_queued_cmd *qc) in pdc20621_dma_prep() argument
456 struct ata_port *ap = qc->ap; in pdc20621_dma_prep()
464 WARN_ON(!(qc->flags & ATA_QCFLAG_DMAMAP)); in pdc20621_dma_prep()
475 for_each_sg(qc->sg, sg, qc->n_elem, si) { in pdc20621_dma_prep()
487 pdc20621_host_pkt(&qc->tf, &pp->dimm_buf[0], portno); in pdc20621_dma_prep()
[all …]
libata-scsi.c
62 typedef unsigned int (*ata_xlat_func_t)(struct ata_queued_cmd *qc);
755 struct ata_queued_cmd *qc; in ata_scsi_qc_new() local
757 qc = ata_qc_new_init(dev); in ata_scsi_qc_new()
758 if (qc) { in ata_scsi_qc_new()
759 qc->scsicmd = cmd; in ata_scsi_qc_new()
760 qc->scsidone = cmd->scsi_done; in ata_scsi_qc_new()
762 qc->sg = scsi_sglist(cmd); in ata_scsi_qc_new()
763 qc->n_elem = scsi_sg_count(cmd); in ata_scsi_qc_new()
769 return qc; in ata_scsi_qc_new()
772 static void ata_qc_set_pc_nbytes(struct ata_queued_cmd *qc) in ata_qc_set_pc_nbytes() argument
[all …]
sata_qstor.c
119 static void qs_qc_prep(struct ata_queued_cmd *qc);
120 static unsigned int qs_qc_issue(struct ata_queued_cmd *qc);
121 static int qs_check_atapi_dma(struct ata_queued_cmd *qc);
182 static int qs_check_atapi_dma(struct ata_queued_cmd *qc) in qs_check_atapi_dma() argument
252 static unsigned int qs_fill_sg(struct ata_queued_cmd *qc) in qs_fill_sg() argument
255 struct ata_port *ap = qc->ap; in qs_fill_sg()
260 for_each_sg(qc->sg, sg, qc->n_elem, si) { in qs_fill_sg()
279 static void qs_qc_prep(struct ata_queued_cmd *qc) in qs_qc_prep() argument
281 struct qs_port_priv *pp = qc->ap->private_data; in qs_qc_prep()
289 qs_enter_reg_mode(qc->ap); in qs_qc_prep()
[all …]
acard-ahci.c
75 static void acard_ahci_qc_prep(struct ata_queued_cmd *qc);
76 static bool acard_ahci_qc_fill_rtf(struct ata_queued_cmd *qc);
229 static unsigned int acard_ahci_fill_sg(struct ata_queued_cmd *qc, void *cmd_tbl) in acard_ahci_fill_sg() argument
240 for_each_sg(qc->sg, sg, qc->n_elem, si) { in acard_ahci_fill_sg()
260 static void acard_ahci_qc_prep(struct ata_queued_cmd *qc) in acard_ahci_qc_prep() argument
262 struct ata_port *ap = qc->ap; in acard_ahci_qc_prep()
264 int is_atapi = ata_is_atapi(qc->tf.protocol); in acard_ahci_qc_prep()
274 cmd_tbl = pp->cmd_tbl + qc->tag * AHCI_CMD_TBL_SZ; in acard_ahci_qc_prep()
276 ata_tf_to_fis(&qc->tf, qc->dev->link->pmp, 1, cmd_tbl); in acard_ahci_qc_prep()
279 memcpy(cmd_tbl + AHCI_CMD_TBL_CDB, qc->cdb, qc->dev->cdb_len); in acard_ahci_qc_prep()
[all …]
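
Several of the entries above (adma_fill_sg, qs_fill_sg, acard_ahci_fill_sg) share one shape: walk the DMA-mapped scatterlist attached to the qc with for_each_sg() and emit one hardware S/G descriptor per element. A hedged sketch of that shape follows; struct prd_entry and fill_sg_sketch are hypothetical stand-ins for a controller's real descriptor format and prep routine.

#include <linux/libata.h>
#include <linux/scatterlist.h>

struct prd_entry {                      /* hypothetical hardware descriptor */
        __le32 addr;
        __le32 len;
};

static unsigned int fill_sg_sketch(struct ata_queued_cmd *qc,
                                   struct prd_entry *prd)
{
        struct scatterlist *sg;
        unsigned int si, n = 0;

        /* one descriptor per DMA-mapped scatterlist element */
        for_each_sg(qc->sg, sg, qc->n_elem, si) {
                prd[n].addr = cpu_to_le32(sg_dma_address(sg));
                prd[n].len  = cpu_to_le32(sg_dma_len(sg));
                n++;
        }
        return n;                       /* number of descriptors written */
}

Returning the count lets the caller terminate the table however the controller expects (an end-of-table flag, a length field, and so on), which is where the drivers above differ.
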
sata_promise.c
158 static void pdc_qc_prep(struct ata_queued_cmd *qc);
161 static int pdc_check_atapi_dma(struct ata_queued_cmd *qc);
162 static int pdc_old_sata_check_atapi_dma(struct ata_queued_cmd *qc);
164 static unsigned int pdc_qc_issue(struct ata_queued_cmd *qc);
174 static void pdc_post_internal_cmd(struct ata_queued_cmd *qc);
501 static void pdc_atapi_pkt(struct ata_queued_cmd *qc) in pdc_atapi_pkt() argument
503 struct ata_port *ap = qc->ap; in pdc_atapi_pkt()
505 unsigned int cdb_len = qc->dev->cdb_len; in pdc_atapi_pkt()
506 u8 *cdb = qc->cdb; in pdc_atapi_pkt()
515 switch (qc->tf.protocol) { in pdc_atapi_pkt()
[all …]
sata_inic162x.c
379 struct ata_queued_cmd *qc = ata_qc_from_tag(ap, ap->link.active_tag); in inic_host_intr() local
391 if (unlikely(!qc)) in inic_host_intr()
402 qc->err_mask |= AC_ERR_DEV; in inic_host_intr()
404 ata_qc_complete(qc); in inic_host_intr()
410 qc ? qc->tf.command : 0xff, irq_stat, idma_stat); in inic_host_intr()
439 static int inic_check_atapi_dma(struct ata_queued_cmd *qc) in inic_check_atapi_dma() argument
447 if (atapi_cmd_type(qc->cdb[0]) == READ) in inic_check_atapi_dma()
452 static void inic_fill_sg(struct inic_prd *prd, struct ata_queued_cmd *qc) in inic_fill_sg() argument
458 if (qc->tf.flags & ATA_TFLAG_WRITE) in inic_fill_sg()
461 if (ata_is_dma(qc->tf.protocol)) in inic_fill_sg()
[all …]
sata_nv.c
315 static int nv_adma_check_atapi_dma(struct ata_queued_cmd *qc);
316 static void nv_adma_qc_prep(struct ata_queued_cmd *qc);
317 static unsigned int nv_adma_qc_issue(struct ata_queued_cmd *qc);
330 static void nv_adma_post_internal_cmd(struct ata_queued_cmd *qc);
338 static void nv_swncq_qc_prep(struct ata_queued_cmd *qc);
339 static void nv_swncq_fill_sg(struct ata_queued_cmd *qc);
340 static unsigned int nv_swncq_qc_issue(struct ata_queued_cmd *qc);
783 static int nv_adma_check_atapi_dma(struct ata_queued_cmd *qc) in nv_adma_check_atapi_dma() argument
785 struct nv_adma_port_priv *pp = qc->ap->private_data; in nv_adma_check_atapi_dma()
882 struct ata_queued_cmd *qc = ata_qc_from_tag(ap, ap->link.active_tag); in nv_host_intr() local
[all …]
libata-eh.c
526 struct ata_queued_cmd *qc; in ata_scsi_timed_out() local
538 qc = ata_qc_from_tag(ap, ap->link.active_tag); in ata_scsi_timed_out()
539 if (qc) { in ata_scsi_timed_out()
540 WARN_ON(qc->scsicmd != cmd); in ata_scsi_timed_out()
541 qc->flags |= ATA_QCFLAG_EH_SCHEDULED; in ata_scsi_timed_out()
542 qc->err_mask |= AC_ERR_TIMEOUT; in ata_scsi_timed_out()
665 struct ata_queued_cmd *qc; in ata_scsi_cmd_error_handler() local
668 qc = __ata_qc_from_tag(ap, i); in ata_scsi_cmd_error_handler()
669 if (qc->flags & ATA_QCFLAG_ACTIVE && in ata_scsi_cmd_error_handler()
670 qc->scsicmd == scmd) in ata_scsi_cmd_error_handler()
[all …]
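
The libata-eh.c matches show the usual error-handling lookup: the link's active tag is mapped back to its queued command with ata_qc_from_tag(), and the qc is flagged so error handling picks it up. A minimal sketch follows; mark_active_qc_timed_out is a made-up helper, and the host-lock handling and scsicmd cross-check from ata_scsi_timed_out() are omitted.

#include <linux/libata.h>

static void mark_active_qc_timed_out(struct ata_port *ap)
{
        struct ata_queued_cmd *qc = ata_qc_from_tag(ap, ap->link.active_tag);

        if (qc) {
                /* route the command through EH and record why */
                qc->flags |= ATA_QCFLAG_EH_SCHEDULED;
                qc->err_mask |= AC_ERR_TIMEOUT;
        }
}
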
pata_pxa.c
60 static void pxa_load_dmac(struct scatterlist *sg, struct ata_queued_cmd *qc) in pxa_load_dmac() argument
62 struct pata_pxa_data *pd = qc->ap->private_data; in pxa_load_dmac()
79 if (qc->tf.flags & ATA_TFLAG_WRITE) { in pxa_load_dmac()
105 static void pxa_qc_prep(struct ata_queued_cmd *qc) in pxa_qc_prep() argument
107 struct pata_pxa_data *pd = qc->ap->private_data; in pxa_qc_prep()
111 if (!(qc->flags & ATA_QCFLAG_DMAMAP)) in pxa_qc_prep()
119 for_each_sg(qc->sg, sg, qc->n_elem, si) in pxa_qc_prep()
120 pxa_load_dmac(sg, qc); in pxa_qc_prep()
136 static void pxa_bmdma_setup(struct ata_queued_cmd *qc) in pxa_bmdma_setup() argument
138 qc->ap->ops->sff_exec_command(qc->ap, &qc->tf); in pxa_bmdma_setup()
[all …]
sata_sil.c
122 static void sil_qc_prep(struct ata_queued_cmd *qc);
123 static void sil_bmdma_setup(struct ata_queued_cmd *qc);
124 static void sil_bmdma_start(struct ata_queued_cmd *qc);
125 static void sil_bmdma_stop(struct ata_queued_cmd *qc);
267 static void sil_bmdma_stop(struct ata_queued_cmd *qc) in sil_bmdma_stop() argument
269 struct ata_port *ap = qc->ap; in sil_bmdma_stop()
280 static void sil_bmdma_setup(struct ata_queued_cmd *qc) in sil_bmdma_setup() argument
282 struct ata_port *ap = qc->ap; in sil_bmdma_setup()
289 ap->ops->sff_exec_command(ap, &qc->tf); in sil_bmdma_setup()
292 static void sil_bmdma_start(struct ata_queued_cmd *qc) in sil_bmdma_start() argument
[all …]
sata_sil24.c
338 static int sil24_qc_defer(struct ata_queued_cmd *qc);
339 static void sil24_qc_prep(struct ata_queued_cmd *qc);
340 static unsigned int sil24_qc_issue(struct ata_queued_cmd *qc);
341 static bool sil24_qc_fill_rtf(struct ata_queued_cmd *qc);
353 static void sil24_post_internal_cmd(struct ata_queued_cmd *qc);
778 static inline void sil24_fill_sg(struct ata_queued_cmd *qc, in sil24_fill_sg() argument
785 for_each_sg(qc->sg, sg, qc->n_elem, si) { in sil24_fill_sg()
797 static int sil24_qc_defer(struct ata_queued_cmd *qc) in sil24_qc_defer() argument
799 struct ata_link *link = qc->dev->link; in sil24_qc_defer()
801 u8 prot = qc->tf.protocol; in sil24_qc_defer()
[all …]
pata_ns87415.c
120 static void ns87415_bmdma_setup(struct ata_queued_cmd *qc) in ns87415_bmdma_setup() argument
122 struct ata_port *ap = qc->ap; in ns87415_bmdma_setup()
123 unsigned int rw = (qc->tf.flags & ATA_TFLAG_WRITE); in ns87415_bmdma_setup()
140 ap->ops->sff_exec_command(ap, &qc->tf); in ns87415_bmdma_setup()
154 static void ns87415_bmdma_start(struct ata_queued_cmd *qc) in ns87415_bmdma_start() argument
156 ns87415_set_mode(qc->ap, qc->dev, qc->dev->dma_mode); in ns87415_bmdma_start()
157 ata_bmdma_start(qc); in ns87415_bmdma_start()
167 static void ns87415_bmdma_stop(struct ata_queued_cmd *qc) in ns87415_bmdma_stop() argument
169 ata_bmdma_stop(qc); in ns87415_bmdma_stop()
170 ns87415_set_mode(qc->ap, qc->dev, qc->dev->pio_mode); in ns87415_bmdma_stop()
[all …]
pata_arasan_cf.c
217 struct ata_queued_cmd *qc; member
366 struct ata_queued_cmd *qc = acdev->qc; in dma_complete() local
369 acdev->qc = NULL; in dma_complete()
373 if (unlikely(qc->err_mask) && ata_is_dma(qc->tf.protocol)) in dma_complete()
374 ata_ehi_push_desc(&qc->ap->link.eh_info, "DMA Failed: Timeout"); in dma_complete()
381 u32 rw = acdev->qc->tf.flags & ATA_TFLAG_WRITE; in wait4buf()
435 u32 write = acdev->qc->tf.flags & ATA_TFLAG_WRITE; in sg_xfer()
522 struct ata_queued_cmd *qc = acdev->qc; in data_xfer() local
536 for_each_sg(qc->sg, sg, qc->n_elem, temp) { in data_xfer()
549 status = ioread8(qc->ap->ioaddr.altstatus_addr); in data_xfer()
[all …]
pata_octeon_cf.c
549 static void octeon_cf_dma_setup(struct ata_queued_cmd *qc) in octeon_cf_dma_setup() argument
551 struct ata_port *ap = qc->ap; in octeon_cf_dma_setup()
557 qc->cursg = qc->sg; in octeon_cf_dma_setup()
559 ap->ops->sff_exec_command(ap, &qc->tf); in octeon_cf_dma_setup()
568 static void octeon_cf_dma_start(struct ata_queued_cmd *qc) in octeon_cf_dma_start() argument
570 struct octeon_cf_port *cf_port = qc->ap->private_data; in octeon_cf_dma_start()
575 VPRINTK("%d scatterlists\n", qc->n_elem); in octeon_cf_dma_start()
578 sg = qc->cursg; in octeon_cf_dma_start()
597 mio_boot_dma_cfg.s.rw = ((qc->tf.flags & ATA_TFLAG_WRITE) != 0); in octeon_cf_dma_start()
631 struct ata_queued_cmd *qc) in octeon_cf_dma_finished() argument
[all …]
pata_triflex.c
154 static void triflex_bmdma_start(struct ata_queued_cmd *qc) in triflex_bmdma_start() argument
156 triflex_load_timing(qc->ap, qc->dev, qc->dev->dma_mode); in triflex_bmdma_start()
157 ata_bmdma_start(qc); in triflex_bmdma_start()
170 static void triflex_bmdma_stop(struct ata_queued_cmd *qc) in triflex_bmdma_stop() argument
172 ata_bmdma_stop(qc); in triflex_bmdma_stop()
173 triflex_load_timing(qc->ap, qc->dev, qc->dev->pio_mode); in triflex_bmdma_stop()
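
pata_triflex.c (like pata_sl82c105.c and pata_pdc202xx_old.c further down) wraps ata_bmdma_start()/ata_bmdma_stop() so the chipset timings are programmed for the device's DMA mode before the transfer and restored to its PIO mode afterwards. A hedged sketch of that wrapper shape follows; load_timing_sketch() stands in for the driver's real timing routine and is not a kernel function.

#include <linux/libata.h>

static void load_timing_sketch(struct ata_port *ap, struct ata_device *adev,
                               u8 mode)
{
        /* driver-specific chipset timing-register programming would go here */
}

static void bmdma_start_sketch(struct ata_queued_cmd *qc)
{
        load_timing_sketch(qc->ap, qc->dev, qc->dev->dma_mode);
        ata_bmdma_start(qc);            /* hand the transfer to the BMDMA engine */
}

static void bmdma_stop_sketch(struct ata_queued_cmd *qc)
{
        ata_bmdma_stop(qc);
        load_timing_sketch(qc->ap, qc->dev, qc->dev->pio_mode);
}
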
libata-core.c
1516 static void ata_qc_complete_internal(struct ata_queued_cmd *qc) in ata_qc_complete_internal() argument
1518 struct completion *waiting = qc->private_data; in ata_qc_complete_internal()
1554 struct ata_queued_cmd *qc; in ata_exec_internal_sg() local
1585 qc = __ata_qc_from_tag(ap, tag); in ata_exec_internal_sg()
1587 qc->tag = tag; in ata_exec_internal_sg()
1588 qc->scsicmd = NULL; in ata_exec_internal_sg()
1589 qc->ap = ap; in ata_exec_internal_sg()
1590 qc->dev = dev; in ata_exec_internal_sg()
1591 ata_qc_reinit(qc); in ata_exec_internal_sg()
1603 qc->tf = *tf; in ata_exec_internal_sg()
[all …]
pata_sl82c105.c
160 static void sl82c105_bmdma_start(struct ata_queued_cmd *qc) in sl82c105_bmdma_start() argument
162 struct ata_port *ap = qc->ap; in sl82c105_bmdma_start()
169 sl82c105_configure_dmamode(ap, qc->dev); in sl82c105_bmdma_start()
171 ata_bmdma_start(qc); in sl82c105_bmdma_start()
189 static void sl82c105_bmdma_stop(struct ata_queued_cmd *qc) in sl82c105_bmdma_stop() argument
191 struct ata_port *ap = qc->ap; in sl82c105_bmdma_stop()
193 ata_bmdma_stop(qc); in sl82c105_bmdma_stop()
199 sl82c105_set_piomode(ap, qc->dev); in sl82c105_bmdma_stop()
212 static int sl82c105_qc_defer(struct ata_queued_cmd *qc) in sl82c105_qc_defer() argument
214 struct ata_host *host = qc->ap->host; in sl82c105_qc_defer()
[all …]
pata_pdc202xx_old.c
176 static void pdc2026x_bmdma_start(struct ata_queued_cmd *qc) in pdc2026x_bmdma_start() argument
178 struct ata_port *ap = qc->ap; in pdc2026x_bmdma_start()
179 struct ata_device *adev = qc->dev; in pdc2026x_bmdma_start()
180 struct ata_taskfile *tf = &qc->tf; in pdc2026x_bmdma_start()
197 pdc202xx_set_dmamode(ap, qc->dev); in pdc2026x_bmdma_start()
201 len = qc->nbytes / 2; in pdc2026x_bmdma_start()
212 ata_bmdma_start(qc); in pdc2026x_bmdma_start()
226 static void pdc2026x_bmdma_stop(struct ata_queued_cmd *qc) in pdc2026x_bmdma_stop() argument
228 struct ata_port *ap = qc->ap; in pdc2026x_bmdma_stop()
229 struct ata_device *adev = qc->dev; in pdc2026x_bmdma_stop()
[all …]
sata_fsl.c
441 static unsigned int sata_fsl_fill_sg(struct ata_queued_cmd *qc, void *cmd_desc, in sata_fsl_fill_sg() argument
465 for_each_sg(qc->sg, sg, qc->n_elem, si) { in sata_fsl_fill_sg()
474 ata_port_err(qc->ap, "s/g addr unaligned : 0x%llx\n", in sata_fsl_fill_sg()
477 ata_port_err(qc->ap, "s/g len unaligned : 0x%x\n", in sata_fsl_fill_sg()
515 static void sata_fsl_qc_prep(struct ata_queued_cmd *qc) in sata_fsl_qc_prep() argument
517 struct ata_port *ap = qc->ap; in sata_fsl_qc_prep()
521 unsigned int tag = sata_fsl_tag(qc->tag, hcr_base); in sata_fsl_qc_prep()
531 ata_tf_to_fis(&qc->tf, qc->dev->link->pmp, 1, (u8 *) &cd->cfis); in sata_fsl_qc_prep()
536 if (qc->tf.protocol == ATA_PROT_NCQ) { in sata_fsl_qc_prep()
542 if (ata_is_atapi(qc->tf.protocol)) { in sata_fsl_qc_prep()
[all …]
pata_bf54x.c
832 static void bfin_bmdma_setup(struct ata_queued_cmd *qc) in bfin_bmdma_setup() argument
834 struct ata_port *ap = qc->ap; in bfin_bmdma_setup()
844 dev_dbg(qc->ap->dev, "in atapi dma setup\n"); in bfin_bmdma_setup()
846 if (qc->tf.flags & ATA_TFLAG_WRITE) { in bfin_bmdma_setup()
855 dma_map_sg(ap->dev, qc->sg, qc->n_elem, dir); in bfin_bmdma_setup()
858 for_each_sg(qc->sg, sg, qc->n_elem, si) { in bfin_bmdma_setup()
867 dma_desc_cpu[qc->n_elem - 1].cfg &= ~(DMAFLOW | NDSIZE); in bfin_bmdma_setup()
871 qc->n_elem * sizeof(struct dma_desc_array)); in bfin_bmdma_setup()
882 bfin_exec_command(ap, &qc->tf); in bfin_bmdma_setup()
884 if (qc->tf.flags & ATA_TFLAG_WRITE) { in bfin_bmdma_setup()
[all …]
sata_mv.c
602 static int mv_qc_defer(struct ata_queued_cmd *qc);
603 static void mv_qc_prep(struct ata_queued_cmd *qc);
604 static void mv_qc_prep_iie(struct ata_queued_cmd *qc);
605 static unsigned int mv_qc_issue(struct ata_queued_cmd *qc);
658 static int mv_check_atapi_dma(struct ata_queued_cmd *qc);
659 static void mv_bmdma_setup(struct ata_queued_cmd *qc);
660 static void mv_bmdma_start(struct ata_queued_cmd *qc);
661 static void mv_bmdma_stop(struct ata_queued_cmd *qc);
1423 static int mv_qc_defer(struct ata_queued_cmd *qc) in mv_qc_defer() argument
1425 struct ata_link *link = qc->dev->link; in mv_qc_defer()
[all …]
/drivers/scsi/libsas/
sas_ata.c
97 struct ata_queued_cmd *qc = task->uldd_task; in sas_ata_task_done() local
110 else if (qc && qc->scsicmd) in sas_ata_task_done()
111 ASSIGN_SAS_TASK(qc->scsicmd, NULL); in sas_ata_task_done()
118 if (!qc) in sas_ata_task_done()
121 ap = qc->ap; in sas_ata_task_done()
128 if (qc->scsicmd) in sas_ata_task_done()
145 qc->err_mask |= ac_err_mask(dev->sata_dev.fis[2]); in sas_ata_task_done()
149 qc->flags |= ATA_QCFLAG_FAILED; in sas_ata_task_done()
158 qc->err_mask = ac; in sas_ata_task_done()
161 qc->flags |= ATA_QCFLAG_FAILED; in sas_ata_task_done()
[all …]
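
In the libsas glue above, sas_ata_task_done() folds the status byte of the received D2H FIS into qc->err_mask before the command is completed, so libata's normal error handling sees the device error. A minimal sketch of that idea; sas_ata_complete_sketch is hypothetical and skips the task and port cleanup the real function performs.

#include <linux/libata.h>

static void sas_ata_complete_sketch(struct ata_queued_cmd *qc, u8 fis_status)
{
        /* translate the FIS status byte into libata error-mask bits */
        qc->err_mask |= ac_err_mask(fis_status);

        ata_qc_complete(qc);            /* hand back to libata's completion path */
}
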
