Lines Matching refs:qc (references to the identifier qc in drivers/ata/libata-core.c; the leading number on each line is the source line within that file)
1465 static void ata_qc_complete_internal(struct ata_queued_cmd *qc) in ata_qc_complete_internal() argument
1467 struct completion *waiting = qc->private_data; in ata_qc_complete_internal()
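The two lines above are the whole point of the internal-command callback: the waiting context lives in qc->private_data. A minimal sketch of how such a callback finishes; the complete() call is not in the listing and is assumed from the standard completion pattern:

/* Sketch: wake whoever sleeps in ata_exec_internal_sg(); only the
 * private_data line appears in the listing, the rest is assumed. */
static void ata_qc_complete_internal(struct ata_queued_cmd *qc)
{
	struct completion *waiting = qc->private_data;

	complete(waiting);	/* unblocks wait_for_completion_timeout() in the issuer */
}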
1503 struct ata_queued_cmd *qc; in ata_exec_internal_sg() local
1522 qc = __ata_qc_from_tag(ap, ATA_TAG_INTERNAL); in ata_exec_internal_sg()
1524 qc->tag = ATA_TAG_INTERNAL; in ata_exec_internal_sg()
1525 qc->hw_tag = 0; in ata_exec_internal_sg()
1526 qc->scsicmd = NULL; in ata_exec_internal_sg()
1527 qc->ap = ap; in ata_exec_internal_sg()
1528 qc->dev = dev; in ata_exec_internal_sg()
1529 ata_qc_reinit(qc); in ata_exec_internal_sg()
1541 qc->tf = *tf; in ata_exec_internal_sg()
1543 memcpy(qc->cdb, cdb, ATAPI_CDB_LEN); in ata_exec_internal_sg()
1548 qc->tf.feature |= ATAPI_DMADIR; in ata_exec_internal_sg()
1550 qc->flags |= ATA_QCFLAG_RESULT_TF; in ata_exec_internal_sg()
1551 qc->dma_dir = dma_dir; in ata_exec_internal_sg()
1559 ata_sg_init(qc, sgl, n_elem); in ata_exec_internal_sg()
1560 qc->nbytes = buflen; in ata_exec_internal_sg()
1563 qc->private_data = &wait; in ata_exec_internal_sg()
1564 qc->complete_fn = ata_qc_complete_internal; in ata_exec_internal_sg()
1566 ata_qc_issue(qc); in ata_exec_internal_sg()
1597 if (qc->flags & ATA_QCFLAG_ACTIVE) { in ata_exec_internal_sg()
1598 qc->err_mask |= AC_ERR_TIMEOUT; in ata_exec_internal_sg()
1603 ata_qc_complete(qc); in ata_exec_internal_sg()
1615 ap->ops->post_internal_cmd(qc); in ata_exec_internal_sg()
1618 if (qc->flags & ATA_QCFLAG_FAILED) { in ata_exec_internal_sg()
1619 if (qc->result_tf.status & (ATA_ERR | ATA_DF)) in ata_exec_internal_sg()
1620 qc->err_mask |= AC_ERR_DEV; in ata_exec_internal_sg()
1622 if (!qc->err_mask) in ata_exec_internal_sg()
1623 qc->err_mask |= AC_ERR_OTHER; in ata_exec_internal_sg()
1625 if (qc->err_mask & ~AC_ERR_OTHER) in ata_exec_internal_sg()
1626 qc->err_mask &= ~AC_ERR_OTHER; in ata_exec_internal_sg()
1627 } else if (qc->tf.command == ATA_CMD_REQ_SENSE_DATA) { in ata_exec_internal_sg()
1628 qc->result_tf.status |= ATA_SENSE; in ata_exec_internal_sg()
1634 *tf = qc->result_tf; in ata_exec_internal_sg()
1635 err_mask = qc->err_mask; in ata_exec_internal_sg()
1637 ata_qc_free(qc); in ata_exec_internal_sg()
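Read in order, the ata_exec_internal_sg() lines above cover the full life of an internal command: grab the reserved tag, fill in the taskfile/CDB, attach a completion, issue, time out or complete, then collect results and free the qc. A condensed sketch of that flow; locking, port freezing and the error-mask fixups are trimmed, and any structure not in the listed lines is an assumption:

/* Simplified flow of ata_exec_internal_sg(). */
DECLARE_COMPLETION_ONSTACK(wait);
struct ata_queued_cmd *qc;
unsigned int err_mask;

qc = __ata_qc_from_tag(ap, ATA_TAG_INTERNAL);	/* the one reserved internal tag */
qc->tag = ATA_TAG_INTERNAL;
qc->hw_tag = 0;
qc->scsicmd = NULL;
qc->ap = ap;
qc->dev = dev;
ata_qc_reinit(qc);

qc->tf = *tf;					/* caller-supplied taskfile */
if (cdb)
	memcpy(qc->cdb, cdb, ATAPI_CDB_LEN);	/* ATAPI packet, if any */
qc->flags |= ATA_QCFLAG_RESULT_TF;		/* always report the result TF */
qc->dma_dir = dma_dir;

if (dma_dir != DMA_NONE) {
	ata_sg_init(qc, sgl, n_elem);		/* attach the caller's scatterlist */
	qc->nbytes = buflen;
}

qc->private_data = &wait;			/* signalled by ata_qc_complete_internal() */
qc->complete_fn = ata_qc_complete_internal;

ata_qc_issue(qc);
/* ... drop the port lock, wait_for_completion_timeout(&wait, ...) ... */

if (qc->flags & ATA_QCFLAG_ACTIVE) {		/* timed out, never completed */
	qc->err_mask |= AC_ERR_TIMEOUT;
	ata_qc_complete(qc);
}

if (ap->ops->post_internal_cmd)
	ap->ops->post_internal_cmd(qc);		/* give the LLD a chance to clean up */

*tf = qc->result_tf;				/* return results to the caller */
err_mask = qc->err_mask;
ata_qc_free(qc);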
4395 int atapi_check_dma(struct ata_queued_cmd *qc) in atapi_check_dma() argument
4397 struct ata_port *ap = qc->ap; in atapi_check_dma()
4402 if (!(qc->dev->horkage & ATA_HORKAGE_ATAPI_MOD16_DMA) && in atapi_check_dma()
4403 unlikely(qc->nbytes & 15)) in atapi_check_dma()
4407 return ap->ops->check_atapi_dma(qc); in atapi_check_dma()
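atapi_check_dma() vetoes DMA for ATAPI commands whose byte count is not a multiple of 16, unless the device carries the MOD16 horkage, and otherwise defers to the LLD's check_atapi_dma hook. A hedged reconstruction around the listed lines; the concrete return values and the NULL check on the hook are assumptions:

/* Sketch: non-zero means "use PIO instead of DMA" for this ATAPI command. */
int atapi_check_dma(struct ata_queued_cmd *qc)
{
	struct ata_port *ap = qc->ap;

	/* Transfers that are not a multiple of 16 bytes confuse many controllers. */
	if (!(qc->dev->horkage & ATA_HORKAGE_ATAPI_MOD16_DMA) &&
	    unlikely(qc->nbytes & 15))
		return 1;

	if (ap->ops->check_atapi_dma)
		return ap->ops->check_atapi_dma(qc);

	return 0;
}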
4427 int ata_std_qc_defer(struct ata_queued_cmd *qc) in ata_std_qc_defer() argument
4429 struct ata_link *link = qc->dev->link; in ata_std_qc_defer()
4431 if (ata_is_ncq(qc->tf.protocol)) { in ata_std_qc_defer()
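Only the NCQ test of ata_std_qc_defer() appears above; its job is to keep NCQ and non-NCQ commands from running on the same link at the same time. A sketch of that conventional rule; everything past the listed ata_is_ncq() check is an assumption:

/* Sketch: defer the qc while the link is busy with the other command class. */
int ata_std_qc_defer(struct ata_queued_cmd *qc)
{
	struct ata_link *link = qc->dev->link;

	if (ata_is_ncq(qc->tf.protocol)) {
		if (!ata_tag_valid(link->active_tag))
			return 0;		/* no non-NCQ command in flight */
	} else {
		if (!ata_tag_valid(link->active_tag) && !link->sactive)
			return 0;		/* link is completely idle */
	}

	return ATA_DEFER_LINK;
}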
4443 enum ata_completion_errors ata_noop_qc_prep(struct ata_queued_cmd *qc) in ata_noop_qc_prep() argument
4462 void ata_sg_init(struct ata_queued_cmd *qc, struct scatterlist *sg, in ata_sg_init() argument
4465 qc->sg = sg; in ata_sg_init()
4466 qc->n_elem = n_elem; in ata_sg_init()
4467 qc->cursg = qc->sg; in ata_sg_init()
4481 static void ata_sg_clean(struct ata_queued_cmd *qc) in ata_sg_clean() argument
4483 struct ata_port *ap = qc->ap; in ata_sg_clean()
4484 struct scatterlist *sg = qc->sg; in ata_sg_clean()
4485 int dir = qc->dma_dir; in ata_sg_clean()
4489 VPRINTK("unmapping %u sg elements\n", qc->n_elem); in ata_sg_clean()
4491 if (qc->n_elem) in ata_sg_clean()
4492 dma_unmap_sg(ap->dev, sg, qc->orig_n_elem, dir); in ata_sg_clean()
4494 qc->flags &= ~ATA_QCFLAG_DMAMAP; in ata_sg_clean()
4495 qc->sg = NULL; in ata_sg_clean()
4511 static int ata_sg_setup(struct ata_queued_cmd *qc) in ata_sg_setup() argument
4513 struct ata_port *ap = qc->ap; in ata_sg_setup()
4518 n_elem = dma_map_sg(ap->dev, qc->sg, qc->n_elem, qc->dma_dir); in ata_sg_setup()
4523 qc->orig_n_elem = qc->n_elem; in ata_sg_setup()
4524 qc->n_elem = n_elem; in ata_sg_setup()
4525 qc->flags |= ATA_QCFLAG_DMAMAP; in ata_sg_setup()
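ata_sg_setup() and ata_sg_clean() are a matched pair around the streaming DMA API: setup maps the scatterlist before issue and marks the qc, clean unmaps it on completion. A condensed sketch of the pairing; the error handling and anything not in the listed lines are assumptions:

/* Sketch: map the qc's scatterlist for DMA before the command is issued ... */
static int ata_sg_setup(struct ata_queued_cmd *qc)
{
	struct ata_port *ap = qc->ap;
	unsigned int n_elem;

	n_elem = dma_map_sg(ap->dev, qc->sg, qc->n_elem, qc->dma_dir);
	if (n_elem < 1)
		return -1;			/* mapping failed */

	qc->orig_n_elem = qc->n_elem;		/* remember how many entries to unmap */
	qc->n_elem = n_elem;			/* the IOMMU may have merged entries */
	qc->flags |= ATA_QCFLAG_DMAMAP;
	return 0;
}

/* ... and unmap it again when the command completes. */
static void ata_sg_clean(struct ata_queued_cmd *qc)
{
	struct ata_port *ap = qc->ap;

	if (qc->n_elem)
		dma_unmap_sg(ap->dev, qc->sg, qc->orig_n_elem, qc->dma_dir);

	qc->flags &= ~ATA_QCFLAG_DMAMAP;
	qc->sg = NULL;
}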
4532 static inline void ata_sg_clean(struct ata_queued_cmd *qc) {} in ata_sg_clean() argument
4533 static inline int ata_sg_setup(struct ata_queued_cmd *qc) { return -1; } in ata_sg_setup() argument
4571 struct ata_queued_cmd *qc; in ata_qc_new_init() local
4584 qc = __ata_qc_from_tag(ap, tag); in ata_qc_new_init()
4585 qc->tag = qc->hw_tag = tag; in ata_qc_new_init()
4586 qc->scsicmd = NULL; in ata_qc_new_init()
4587 qc->ap = ap; in ata_qc_new_init()
4588 qc->dev = dev; in ata_qc_new_init()
4590 ata_qc_reinit(qc); in ata_qc_new_init()
4592 return qc; in ata_qc_new_init()
4605 void ata_qc_free(struct ata_queued_cmd *qc) in ata_qc_free() argument
4610 WARN_ON_ONCE(qc == NULL); /* ata_qc_from_tag _might_ return NULL */ in ata_qc_free()
4611 ap = qc->ap; in ata_qc_free()
4613 qc->flags = 0; in ata_qc_free()
4614 tag = qc->tag; in ata_qc_free()
4616 qc->tag = ATA_TAG_POISON; in ata_qc_free()
4622 void __ata_qc_complete(struct ata_queued_cmd *qc) in __ata_qc_complete() argument
4627 WARN_ON_ONCE(qc == NULL); /* ata_qc_from_tag _might_ return NULL */ in __ata_qc_complete()
4628 WARN_ON_ONCE(!(qc->flags & ATA_QCFLAG_ACTIVE)); in __ata_qc_complete()
4629 ap = qc->ap; in __ata_qc_complete()
4630 link = qc->dev->link; in __ata_qc_complete()
4632 if (likely(qc->flags & ATA_QCFLAG_DMAMAP)) in __ata_qc_complete()
4633 ata_sg_clean(qc); in __ata_qc_complete()
4636 if (ata_is_ncq(qc->tf.protocol)) { in __ata_qc_complete()
4637 link->sactive &= ~(1 << qc->hw_tag); in __ata_qc_complete()
4646 if (unlikely(qc->flags & ATA_QCFLAG_CLEAR_EXCL && in __ata_qc_complete()
4654 qc->flags &= ~ATA_QCFLAG_ACTIVE; in __ata_qc_complete()
4655 ap->qc_active &= ~(1ULL << qc->tag); in __ata_qc_complete()
4658 qc->complete_fn(qc); in __ata_qc_complete()
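__ata_qc_complete() is the common tail of every completion path: undo the DMA mapping, retire the tag from the link and port bookkeeping, then hand off to the per-command callback. A sketch of the ordering the lines above imply; the non-NCQ branch and anything not listed are assumptions:

/* Sketch: teardown order inside __ata_qc_complete(). */
if (qc->flags & ATA_QCFLAG_DMAMAP)
	ata_sg_clean(qc);			/* 1. unmap the scatterlist */

if (ata_is_ncq(qc->tf.protocol))
	link->sactive &= ~(1 << qc->hw_tag);	/* 2. retire the NCQ tag ... */
else
	link->active_tag = ATA_TAG_POISON;	/*    ... or the single active tag */

qc->flags &= ~ATA_QCFLAG_ACTIVE;		/* 3. drop it from the port's active set */
ap->qc_active &= ~(1ULL << qc->tag);

qc->complete_fn(qc);				/* 4. notify the issuer (SCSI or internal) */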
4661 static void fill_result_tf(struct ata_queued_cmd *qc) in fill_result_tf() argument
4663 struct ata_port *ap = qc->ap; in fill_result_tf()
4665 qc->result_tf.flags = qc->tf.flags; in fill_result_tf()
4666 ap->ops->qc_fill_rtf(qc); in fill_result_tf()
4669 static void ata_verify_xfer(struct ata_queued_cmd *qc) in ata_verify_xfer() argument
4671 struct ata_device *dev = qc->dev; in ata_verify_xfer()
4673 if (!ata_is_data(qc->tf.protocol)) in ata_verify_xfer()
4676 if ((dev->mwdma_mask || dev->udma_mask) && ata_is_pio(qc->tf.protocol)) in ata_verify_xfer()
4697 void ata_qc_complete(struct ata_queued_cmd *qc) in ata_qc_complete() argument
4699 struct ata_port *ap = qc->ap; in ata_qc_complete()
4702 ledtrig_disk_activity(!!(qc->tf.flags & ATA_TFLAG_WRITE)); in ata_qc_complete()
4718 struct ata_device *dev = qc->dev; in ata_qc_complete()
4721 if (unlikely(qc->err_mask)) in ata_qc_complete()
4722 qc->flags |= ATA_QCFLAG_FAILED; in ata_qc_complete()
4728 if (unlikely(ata_tag_internal(qc->tag))) { in ata_qc_complete()
4729 fill_result_tf(qc); in ata_qc_complete()
4730 trace_ata_qc_complete_internal(qc); in ata_qc_complete()
4731 __ata_qc_complete(qc); in ata_qc_complete()
4739 if (unlikely(qc->flags & ATA_QCFLAG_FAILED)) { in ata_qc_complete()
4740 fill_result_tf(qc); in ata_qc_complete()
4741 trace_ata_qc_complete_failed(qc); in ata_qc_complete()
4742 ata_qc_schedule_eh(qc); in ata_qc_complete()
4749 if (qc->flags & ATA_QCFLAG_RESULT_TF) in ata_qc_complete()
4750 fill_result_tf(qc); in ata_qc_complete()
4752 trace_ata_qc_complete_done(qc); in ata_qc_complete()
4756 switch (qc->tf.command) { in ata_qc_complete()
4758 if (qc->tf.feature != SETFEATURES_WC_ON && in ata_qc_complete()
4759 qc->tf.feature != SETFEATURES_WC_OFF && in ata_qc_complete()
4760 qc->tf.feature != SETFEATURES_RA_ON && in ata_qc_complete()
4761 qc->tf.feature != SETFEATURES_RA_OFF) in ata_qc_complete()
4777 ata_verify_xfer(qc); in ata_qc_complete()
4779 __ata_qc_complete(qc); in ata_qc_complete()
4781 if (qc->flags & ATA_QCFLAG_EH_SCHEDULED) in ata_qc_complete()
4785 if (qc->err_mask || qc->flags & ATA_QCFLAG_RESULT_TF) in ata_qc_complete()
4786 fill_result_tf(qc); in ata_qc_complete()
4788 __ata_qc_complete(qc); in ata_qc_complete()
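When the port uses the regular libata error handler, the ata_qc_complete() lines above split completions three ways: internal commands complete directly, failed commands are parked for EH, and successes optionally read back the result taskfile; the short tail at the end is the legacy non-EH path. A sketch of that decision tree; everything between the listed lines is an assumption:

/* Sketch: dispatch inside ata_qc_complete() on the EH-enabled path. */
if (qc->err_mask)
	qc->flags |= ATA_QCFLAG_FAILED;

if (ata_tag_internal(qc->tag)) {		/* internal commands always finish here;
						 * ata_exec_internal_sg() handles errors */
	fill_result_tf(qc);
	__ata_qc_complete(qc);
	return;
}

if (qc->flags & ATA_QCFLAG_FAILED) {		/* hand the failure to the error handler */
	fill_result_tf(qc);
	ata_qc_schedule_eh(qc);
	return;					/* EH owns the qc from this point */
}

if (qc->flags & ATA_QCFLAG_RESULT_TF)		/* success: read the TF back only if asked */
	fill_result_tf(qc);

__ata_qc_complete(qc);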
4829 void ata_qc_issue(struct ata_queued_cmd *qc) in ata_qc_issue() argument
4831 struct ata_port *ap = qc->ap; in ata_qc_issue()
4832 struct ata_link *link = qc->dev->link; in ata_qc_issue()
4833 u8 prot = qc->tf.protocol; in ata_qc_issue()
4842 WARN_ON_ONCE(link->sactive & (1 << qc->hw_tag)); in ata_qc_issue()
4846 link->sactive |= 1 << qc->hw_tag; in ata_qc_issue()
4851 link->active_tag = qc->tag; in ata_qc_issue()
4854 qc->flags |= ATA_QCFLAG_ACTIVE; in ata_qc_issue()
4855 ap->qc_active |= 1ULL << qc->tag; in ata_qc_issue()
4861 if (ata_is_data(prot) && (!qc->sg || !qc->n_elem || !qc->nbytes)) in ata_qc_issue()
4866 if (ata_sg_setup(qc)) in ata_qc_issue()
4870 if (unlikely(qc->dev->flags & ATA_DFLAG_SLEEPING)) { in ata_qc_issue()
4877 qc->err_mask |= ap->ops->qc_prep(qc); in ata_qc_issue()
4878 if (unlikely(qc->err_mask)) in ata_qc_issue()
4880 trace_ata_qc_issue(qc); in ata_qc_issue()
4881 qc->err_mask |= ap->ops->qc_issue(qc); in ata_qc_issue()
4882 if (unlikely(qc->err_mask)) in ata_qc_issue()
4887 qc->err_mask |= AC_ERR_SYSTEM; in ata_qc_issue()
4889 ata_qc_complete(qc); in ata_qc_issue()
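The ata_qc_issue() lines trace the issue path: account the tag on the link and the port, map data if the protocol moves any, let the LLD prepare and fire the command, and turn any failure into an immediate completion. A condensed sketch; the locking, the ATA_DFLAG_SLEEPING special case and the exact DMA/PIO mapping condition are simplified or assumed:

/* Sketch: main steps of ata_qc_issue(); prot == qc->tf.protocol. */
if (ata_is_ncq(prot))
	link->sactive |= 1 << qc->hw_tag;	/* per-link NCQ tag bitmap */
else
	link->active_tag = qc->tag;		/* single outstanding non-NCQ command */

qc->flags |= ATA_QCFLAG_ACTIVE;
ap->qc_active |= 1ULL << qc->tag;		/* port-wide active-tag bitmap */

if (ata_is_data(prot) && (!qc->sg || !qc->n_elem || !qc->nbytes))
	goto sys_err;				/* data command without data is a bug */

if (ata_is_dma(prot) ||
    (ata_is_pio(prot) && (ap->flags & ATA_FLAG_PIO_DMA)))	/* assumed mapping condition */
	if (ata_sg_setup(qc))
		goto sys_err;

qc->err_mask |= ap->ops->qc_prep(qc);		/* build PRD tables, FIS, etc. */
if (qc->err_mask)
	goto err;

qc->err_mask |= ap->ops->qc_issue(qc);		/* hand the command to the controller */
if (qc->err_mask)
	goto err;
return;

sys_err:
	qc->err_mask |= AC_ERR_SYSTEM;
err:
	ata_qc_complete(qc);			/* fail the command immediately */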
6495 static unsigned int ata_dummy_qc_issue(struct ata_queued_cmd *qc) in ata_dummy_qc_issue() argument