
Lines matching references to qc

119 static enum ata_completion_errors adma_qc_prep(struct ata_queued_cmd *qc);
120 static unsigned int adma_qc_issue(struct ata_queued_cmd *qc);
121 static int adma_check_atapi_dma(struct ata_queued_cmd *qc);
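
These three prototypes are the driver's libata entry points. For orientation, below is a minimal sketch of how such hooks are typically wired into an ata_port_operations table; the hook names (.check_atapi_dma, .qc_prep, .qc_issue) are the standard libata fields, but the ops-struct name and the inherited base ops are assumptions, not quoted from the file.

	/* Illustrative only: how the prototypes above typically plug into libata.
	 * The ops-struct name and .inherits choice are assumptions. */
	static struct ata_port_operations adma_ops_sketch = {
		.inherits		= &ata_sff_port_ops,
		.check_atapi_dma	= adma_check_atapi_dma,
		.qc_prep		= adma_qc_prep,
		.qc_issue		= adma_qc_issue,
	};
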
172 static int adma_check_atapi_dma(struct ata_queued_cmd *qc) in adma_check_atapi_dma() argument
256 static int adma_fill_sg(struct ata_queued_cmd *qc) in adma_fill_sg() argument
259 struct ata_port *ap = qc->ap; in adma_fill_sg()
263 u8 pFLAGS = pORD | ((qc->tf.flags & ATA_TFLAG_WRITE) ? pDIRO : 0); in adma_fill_sg()
266 for_each_sg(qc->sg, sg, qc->n_elem, si) { in adma_fill_sg()
280 buf[i++] = qc->dev->dma_mode & 0xf; in adma_fill_sg()
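
The matched lines in adma_fill_sg() show the usual libata scatter/gather walk: for_each_sg() iterates the DMA-mapped list hanging off qc->sg, and each segment's bus address and length are packed into the controller's packet buffer, with the direction flag taken from ATA_TFLAG_WRITE. A minimal sketch of that pattern, with the controller-specific flag bytes (pORD, pDIRO, pFLAGS) and the exact entry layout left out:

	/* Sketch only: the generic sg walk behind adma_fill_sg(); the real routine
	 * also emits the pFLAGS/pORD/pDIRO control bytes and the device DMA mode. */
	static unsigned int sketch_fill_sg(struct ata_queued_cmd *qc, u8 *buf,
					   unsigned int i)
	{
		struct scatterlist *sg;
		unsigned int si;

		for_each_sg(qc->sg, sg, qc->n_elem, si) {
			u32 addr = (u32)sg_dma_address(sg);	/* bus address of segment */
			u32 len  = sg_dma_len(sg);		/* segment length in bytes */

			/* hypothetical little-endian packing of one entry */
			put_unaligned_le32(addr, buf + i);	i += 4;
			put_unaligned_le32(len,  buf + i);	i += 4;
		}
		return i;	/* next free offset in the packet buffer */
	}
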
298 static enum ata_completion_errors adma_qc_prep(struct ata_queued_cmd *qc) in adma_qc_prep() argument
300 struct adma_port_priv *pp = qc->ap->private_data; in adma_qc_prep()
307 adma_enter_reg_mode(qc->ap); in adma_qc_prep()
308 if (qc->tf.protocol != ATA_PROT_DMA) in adma_qc_prep()
326 buf[i++] = qc->tf.device; in adma_qc_prep()
328 if ((qc->tf.flags & ATA_TFLAG_LBA48)) { in adma_qc_prep()
329 buf[i++] = qc->tf.hob_nsect; in adma_qc_prep()
331 buf[i++] = qc->tf.hob_lbal; in adma_qc_prep()
333 buf[i++] = qc->tf.hob_lbam; in adma_qc_prep()
335 buf[i++] = qc->tf.hob_lbah; in adma_qc_prep()
338 buf[i++] = qc->tf.nsect; in adma_qc_prep()
340 buf[i++] = qc->tf.lbal; in adma_qc_prep()
342 buf[i++] = qc->tf.lbam; in adma_qc_prep()
344 buf[i++] = qc->tf.lbah; in adma_qc_prep()
350 buf[i++] = qc->tf.command; in adma_qc_prep()
356 i = adma_fill_sg(qc); in adma_qc_prep()
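
adma_qc_prep() drops the port back to register mode, bails out for anything other than ATA_PROT_DMA, and then serializes the taskfile into the command packet byte by byte, emitting the HOB (high-order byte) registers first when ATA_TFLAG_LBA48 is set and the command opcode last, before appending the scatter/gather entries. A condensed sketch of that serialization, with the interleaved register-select bytes of the real packet omitted:

	/* Sketch of the taskfile serialization in adma_qc_prep(); the real packet
	 * interleaves controller register-select bytes that are omitted here. */
	buf[i++] = qc->tf.device;
	if (qc->tf.flags & ATA_TFLAG_LBA48) {
		buf[i++] = qc->tf.hob_nsect;	/* high-order bytes first for LBA48 */
		buf[i++] = qc->tf.hob_lbal;
		buf[i++] = qc->tf.hob_lbam;
		buf[i++] = qc->tf.hob_lbah;
	}
	buf[i++] = qc->tf.nsect;
	buf[i++] = qc->tf.lbal;
	buf[i++] = qc->tf.lbam;
	buf[i++] = qc->tf.lbah;
	buf[i++] = qc->tf.command;	/* command opcode written last */
	i = adma_fill_sg(qc);		/* then the scatter/gather entries */
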
377 static inline void adma_packet_start(struct ata_queued_cmd *qc) in adma_packet_start() argument
379 struct ata_port *ap = qc->ap; in adma_packet_start()
388 static unsigned int adma_qc_issue(struct ata_queued_cmd *qc) in adma_qc_issue() argument
390 struct adma_port_priv *pp = qc->ap->private_data; in adma_qc_issue()
392 switch (qc->tf.protocol) { in adma_qc_issue()
395 adma_packet_start(qc); in adma_qc_issue()
407 return ata_sff_qc_issue(qc); in adma_qc_issue()
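
adma_qc_issue() is a protocol dispatcher: DMA commands are kicked off through adma_packet_start(), which hands the prepared packet to the ADMA engine, while every other protocol falls back to the generic SFF issue path. A minimal sketch of that switch, leaving out the ADMA-state bookkeeping kept in the port's private data:

	/* Sketch of the dispatch in adma_qc_issue(): DMA goes to the ADMA engine,
	 * all other protocols use the standard SFF issue path. */
	switch (qc->tf.protocol) {
	case ATA_PROT_DMA:
		adma_packet_start(qc);		/* hand the prepared packet to the engine */
		return 0;
	default:
		break;
	}
	return ata_sff_qc_issue(qc);		/* PIO / non-DMA fallback */
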
417 struct ata_queued_cmd *qc; in adma_intr_pkt() local
428 qc = ata_qc_from_tag(ap, ap->link.active_tag); in adma_intr_pkt()
429 if (qc && (!(qc->tf.flags & ATA_TFLAG_POLLING))) { in adma_intr_pkt()
431 qc->err_mask |= AC_ERR_HOST_BUS; in adma_intr_pkt()
433 qc->err_mask |= AC_ERR_OTHER; in adma_intr_pkt()
436 qc->err_mask |= AC_ERR_DEV; in adma_intr_pkt()
438 qc->err_mask |= AC_ERR_OTHER; in adma_intr_pkt()
440 if (!qc->err_mask) in adma_intr_pkt()
441 ata_qc_complete(qc); in adma_intr_pkt()
450 if (qc->err_mask == AC_ERR_DEV) in adma_intr_pkt()
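
In the packet-mode interrupt handler, adma_intr_pkt(), the active command is looked up from the port's active tag, hardware status conditions are folded into qc->err_mask (host-bus, device, or other errors), and the command is completed only when no error was recorded; otherwise it is left for libata's error handling. A sketch of that shape, with the controller status decoding reduced to hypothetical placeholders:

	/* Sketch of the completion pattern in adma_intr_pkt(); drv_status and the
	 * DRV_STAT_* bits are hypothetical stand-ins for the real status decoding. */
	struct ata_queued_cmd *qc = ata_qc_from_tag(ap, ap->link.active_tag);

	if (qc && !(qc->tf.flags & ATA_TFLAG_POLLING)) {
		if (drv_status & DRV_STAT_HOST_ERR)	/* hypothetical bit */
			qc->err_mask |= AC_ERR_HOST_BUS;
		else if (drv_status & DRV_STAT_DEV_ERR)	/* hypothetical bit */
			qc->err_mask |= AC_ERR_DEV;

		if (!qc->err_mask)
			ata_qc_complete(qc);	/* clean completion */
		/* on error, the command is left for libata error handling */
	}
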
467 struct ata_queued_cmd *qc; in adma_intr_mmio() local
471 qc = ata_qc_from_tag(ap, ap->link.active_tag); in adma_intr_mmio()
472 if (qc && (!(qc->tf.flags & ATA_TFLAG_POLLING))) { in adma_intr_mmio()
479 ap->print_id, qc->tf.protocol, status); in adma_intr_mmio()
483 qc->err_mask |= ac_err_mask(status); in adma_intr_mmio()
484 if (!qc->err_mask) in adma_intr_mmio()
485 ata_qc_complete(qc); in adma_intr_mmio()
491 if (qc->err_mask == AC_ERR_DEV) in adma_intr_mmio()
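
The register-mode handler, adma_intr_mmio(), does the same active-tag lookup but derives the error mask directly from the ATA status byte via ac_err_mask(), which maps the ERR/DF bits of a taskfile status into AC_ERR_* flags. A one-line sketch of that step, assuming status already holds the value read back from the Status register:

	/* Sketch: fold an ATA Status value into the command's error mask,
	 * as the matched lines above do; 'status' is assumed to come from
	 * the taskfile Status register. */
	qc->err_mask |= ac_err_mask(status);
	if (!qc->err_mask)
		ata_qc_complete(qc);
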