Lines Matching refs:tf
413 void ata_sff_tf_load(struct ata_port *ap, const struct ata_taskfile *tf) in ata_sff_tf_load() argument
416 unsigned int is_addr = tf->flags & ATA_TFLAG_ISADDR; in ata_sff_tf_load()
418 if (tf->ctl != ap->last_ctl) { in ata_sff_tf_load()
420 iowrite8(tf->ctl, ioaddr->ctl_addr); in ata_sff_tf_load()
421 ap->last_ctl = tf->ctl; in ata_sff_tf_load()
425 if (is_addr && (tf->flags & ATA_TFLAG_LBA48)) { in ata_sff_tf_load()
427 iowrite8(tf->hob_feature, ioaddr->feature_addr); in ata_sff_tf_load()
428 iowrite8(tf->hob_nsect, ioaddr->nsect_addr); in ata_sff_tf_load()
429 iowrite8(tf->hob_lbal, ioaddr->lbal_addr); in ata_sff_tf_load()
430 iowrite8(tf->hob_lbam, ioaddr->lbam_addr); in ata_sff_tf_load()
431 iowrite8(tf->hob_lbah, ioaddr->lbah_addr); in ata_sff_tf_load()
433 tf->hob_feature, in ata_sff_tf_load()
434 tf->hob_nsect, in ata_sff_tf_load()
435 tf->hob_lbal, in ata_sff_tf_load()
436 tf->hob_lbam, in ata_sff_tf_load()
437 tf->hob_lbah); in ata_sff_tf_load()
441 iowrite8(tf->feature, ioaddr->feature_addr); in ata_sff_tf_load()
442 iowrite8(tf->nsect, ioaddr->nsect_addr); in ata_sff_tf_load()
443 iowrite8(tf->lbal, ioaddr->lbal_addr); in ata_sff_tf_load()
444 iowrite8(tf->lbam, ioaddr->lbam_addr); in ata_sff_tf_load()
445 iowrite8(tf->lbah, ioaddr->lbah_addr); in ata_sff_tf_load()
447 tf->feature, in ata_sff_tf_load()
448 tf->nsect, in ata_sff_tf_load()
449 tf->lbal, in ata_sff_tf_load()
450 tf->lbam, in ata_sff_tf_load()
451 tf->lbah); in ata_sff_tf_load()
454 if (tf->flags & ATA_TFLAG_DEVICE) { in ata_sff_tf_load()
455 iowrite8(tf->device, ioaddr->device_addr); in ata_sff_tf_load()
456 VPRINTK("device 0x%X\n", tf->device); in ata_sff_tf_load()
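
Taken together, the ata_sff_tf_load() matches above spell out the SFF write ordering: Device Control is rewritten only when it differs from ap->last_ctl, the HOB (high-order) bytes go out first for LBA48 commands, then the low-order bytes, and finally the Device register. A minimal user-space sketch of that ordering follows; iowrite8() is a printing stub here, and the flag values and register names are copied for illustration only, not the kernel's MMIO plumbing.

    #include <stdint.h>
    #include <stdio.h>

    /* Taskfile flags as used in the matches above (values for illustration). */
    #define ATA_TFLAG_LBA48   (1 << 0)
    #define ATA_TFLAG_ISADDR  (1 << 1)
    #define ATA_TFLAG_DEVICE  (1 << 2)

    struct taskfile {
        unsigned int flags;
        uint8_t ctl, device;
        uint8_t feature, nsect, lbal, lbam, lbah;
        uint8_t hob_feature, hob_nsect, hob_lbal, hob_lbam, hob_lbah;
    };

    /* Stub for the kernel's iowrite8(): just trace which register gets which value. */
    static void iowrite8(uint8_t val, const char *reg)
    {
        printf("iowrite8(0x%02x) -> %s\n", val, reg);
    }

    /* Same ordering as ata_sff_tf_load(): ctl only if it changed, HOB bytes
     * for LBA48, then the low-order bytes, then the Device register. */
    static void tf_load(const struct taskfile *tf, uint8_t *last_ctl)
    {
        unsigned int is_addr = tf->flags & ATA_TFLAG_ISADDR;

        if (tf->ctl != *last_ctl) {
            iowrite8(tf->ctl, "ctl");
            *last_ctl = tf->ctl;
        }

        if (is_addr && (tf->flags & ATA_TFLAG_LBA48)) {
            iowrite8(tf->hob_feature, "feature");
            iowrite8(tf->hob_nsect,   "nsect");
            iowrite8(tf->hob_lbal,    "lbal");
            iowrite8(tf->hob_lbam,    "lbam");
            iowrite8(tf->hob_lbah,    "lbah");
        }

        if (is_addr) {
            iowrite8(tf->feature, "feature");
            iowrite8(tf->nsect,   "nsect");
            iowrite8(tf->lbal,    "lbal");
            iowrite8(tf->lbam,    "lbam");
            iowrite8(tf->lbah,    "lbah");
        }

        if (tf->flags & ATA_TFLAG_DEVICE)
            iowrite8(tf->device, "device");
    }

    int main(void)
    {
        uint8_t last_ctl = 0xff;
        struct taskfile tf = {
            .flags = ATA_TFLAG_ISADDR | ATA_TFLAG_DEVICE | ATA_TFLAG_LBA48,
            .ctl = 0x08, .device = 0x40, .nsect = 1,
        };
        tf_load(&tf, &last_ctl);
        return 0;
    }
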
476 void ata_sff_tf_read(struct ata_port *ap, struct ata_taskfile *tf) in ata_sff_tf_read() argument
480 tf->command = ata_sff_check_status(ap); in ata_sff_tf_read()
481 tf->feature = ioread8(ioaddr->error_addr); in ata_sff_tf_read()
482 tf->nsect = ioread8(ioaddr->nsect_addr); in ata_sff_tf_read()
483 tf->lbal = ioread8(ioaddr->lbal_addr); in ata_sff_tf_read()
484 tf->lbam = ioread8(ioaddr->lbam_addr); in ata_sff_tf_read()
485 tf->lbah = ioread8(ioaddr->lbah_addr); in ata_sff_tf_read()
486 tf->device = ioread8(ioaddr->device_addr); in ata_sff_tf_read()
488 if (tf->flags & ATA_TFLAG_LBA48) { in ata_sff_tf_read()
490 iowrite8(tf->ctl | ATA_HOB, ioaddr->ctl_addr); in ata_sff_tf_read()
491 tf->hob_feature = ioread8(ioaddr->error_addr); in ata_sff_tf_read()
492 tf->hob_nsect = ioread8(ioaddr->nsect_addr); in ata_sff_tf_read()
493 tf->hob_lbal = ioread8(ioaddr->lbal_addr); in ata_sff_tf_read()
494 tf->hob_lbam = ioread8(ioaddr->lbam_addr); in ata_sff_tf_read()
495 tf->hob_lbah = ioread8(ioaddr->lbah_addr); in ata_sff_tf_read()
496 iowrite8(tf->ctl, ioaddr->ctl_addr); in ata_sff_tf_read()
497 ap->last_ctl = tf->ctl; in ata_sff_tf_read()
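
ata_sff_tf_read() is the mirror image: Status is read first via ata_sff_check_status(), then Error/Count/LBA/Device, and for LBA48 commands the ATA_HOB bit is set in Device Control so the same shadow registers return their high-order bytes, which are read before ctl is restored and cached in ap->last_ctl. A sketch of just that HOB toggle against a fake register file follows; only the count/LBA registers are modeled and the accessors are stubs.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define ATA_HOB  (1 << 7)   /* HOB select bit in the Device Control register */

    /* Fake device model: each shadow register holds a low and a high byte,
     * and the HOB bit in the last ctl write selects which one a read returns. */
    struct fake_regs {
        bool hob;
        uint8_t nsect[2], lbal[2], lbam[2], lbah[2];
    };

    static void wr_ctl(struct fake_regs *r, uint8_t val)
    {
        r->hob = (val & ATA_HOB) != 0;
    }

    static uint8_t rd(const struct fake_regs *r, const uint8_t pair[2])
    {
        return pair[r->hob ? 1 : 0];
    }

    /* Mirrors the LBA48 branch of ata_sff_tf_read(): set ATA_HOB in ctl,
     * re-read the same registers for the high-order bytes, then restore ctl. */
    static void tf_read_lba48(struct fake_regs *r, uint8_t ctl, uint8_t hob_out[4])
    {
        wr_ctl(r, ctl | ATA_HOB);       /* iowrite8(tf->ctl | ATA_HOB, ctl_addr) */
        hob_out[0] = rd(r, r->nsect);
        hob_out[1] = rd(r, r->lbal);
        hob_out[2] = rd(r, r->lbam);
        hob_out[3] = rd(r, r->lbah);
        wr_ctl(r, ctl);                 /* iowrite8(tf->ctl, ctl_addr); last_ctl cached */
    }

    int main(void)
    {
        struct fake_regs regs = {
            .nsect = { 0x01, 0x00 },
            .lbal  = { 0x10, 0x02 },
            .lbam  = { 0x20, 0x03 },
            .lbah  = { 0x30, 0x04 },
        };
        uint8_t hob[4];

        tf_read_lba48(&regs, 0x08, hob);
        printf("hob_nsect=%02x hob_lbal=%02x hob_lbam=%02x hob_lbah=%02x\n",
               hob[0], hob[1], hob[2], hob[3]);
        return 0;
    }
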
515 void ata_sff_exec_command(struct ata_port *ap, const struct ata_taskfile *tf) in ata_sff_exec_command() argument
517 DPRINTK("ata%u: cmd 0x%X\n", ap->print_id, tf->command); in ata_sff_exec_command()
519 iowrite8(tf->command, ap->ioaddr.command_addr); in ata_sff_exec_command()
537 const struct ata_taskfile *tf) in ata_tf_to_host() argument
539 ap->ops->sff_tf_load(ap, tf); in ata_tf_to_host()
540 ap->ops->sff_exec_command(ap, tf); in ata_tf_to_host()
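
ata_sff_exec_command() is a single write of the command byte to the Command register, and ata_tf_to_host() simply chains the port's sff_tf_load and sff_exec_command hooks. A sketch of that two-step issue path, with a hypothetical port_ops struct standing in for struct ata_port_operations:

    #include <stdint.h>
    #include <stdio.h>

    struct taskfile { uint8_t command; /* address/feature bytes elided */ };

    /* Hypothetical stand-in for the sff_tf_load/sff_exec_command hooks in
     * struct ata_port_operations. */
    struct port_ops {
        void (*tf_load)(const struct taskfile *tf);
        void (*exec_command)(const struct taskfile *tf);
    };

    static void demo_tf_load(const struct taskfile *tf)
    {
        (void)tf;
        printf("load taskfile (command byte not written yet)\n");
    }

    static void demo_exec_command(const struct taskfile *tf)
    {
        /* Writing the Command register is what actually starts the command. */
        printf("iowrite8(0x%02x) -> command\n", tf->command);
    }

    /* Mirrors ata_tf_to_host(): load the registers, then hit Command. */
    static void tf_to_host(const struct port_ops *ops, const struct taskfile *tf)
    {
        ops->tf_load(tf);
        ops->exec_command(tf);
    }

    int main(void)
    {
        struct port_ops ops = { demo_tf_load, demo_exec_command };
        struct taskfile tf = { .command = 0x25 };   /* READ DMA EXT, for example */
        tf_to_host(&ops, &tf);
        return 0;
    }
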
700 int do_write = (qc->tf.flags & ATA_TFLAG_WRITE); in ata_pio_sector()
716 DPRINTK("data %s\n", qc->tf.flags & ATA_TFLAG_WRITE ? "write" : "read"); in ata_pio_sector()
761 if (is_multi_taskfile(&qc->tf)) { in ata_pio_sectors()
798 switch (qc->tf.protocol) { in atapi_send_cdb()
830 int rw = (qc->tf.flags & ATA_TFLAG_WRITE) ? WRITE : READ; in __atapi_pio_bytes()
861 DPRINTK("data %s\n", qc->tf.flags & ATA_TFLAG_WRITE ? "write" : "read"); in __atapi_pio_bytes()
917 int i_write, do_write = (qc->tf.flags & ATA_TFLAG_WRITE) ? 1 : 0; in atapi_pio_bytes()
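
Throughout the PIO data path the transfer direction is recovered from ATA_TFLAG_WRITE in the queued command's taskfile, and ata_pio_sectors() uses is_multi_taskfile() to decide whether one sector or a qc->dev->multi_count burst moves per DRQ. A small sketch of deriving both from the taskfile; the is_multi() helper here is a hypothetical simplification (the real is_multi_taskfile() tests the command opcode against the READ/WRITE MULTIPLE variants):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define ATA_TFLAG_WRITE  (1 << 3)   /* value as in include/linux/ata.h */

    struct taskfile {
        unsigned int flags;
        uint8_t command;
    };

    /* Hypothetical simplification of is_multi_taskfile(). */
    static bool is_multi(const struct taskfile *tf, const uint8_t *multi_cmds, int n)
    {
        for (int i = 0; i < n; i++)
            if (tf->command == multi_cmds[i])
                return true;
        return false;
    }

    int main(void)
    {
        const uint8_t multi_cmds[] = { 0xc4, 0xc5, 0x29, 0x39 }; /* READ/WRITE MULTIPLE (EXT) */
        struct taskfile tf = { .flags = ATA_TFLAG_WRITE, .command = 0xc5 };
        unsigned int multi_count = 8;   /* from qc->dev->multi_count in the driver */

        /* Direction and burst size both come from the taskfile, as in
         * ata_pio_sector()/ata_pio_sectors(). */
        int do_write = (tf.flags & ATA_TFLAG_WRITE) ? 1 : 0;
        unsigned int nsect = is_multi(&tf, multi_cmds, 4) ? multi_count : 1;

        printf("data %s, %u sector(s) per DRQ\n", do_write ? "write" : "read", nsect);
        return 0;
    }
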
970 if (qc->tf.flags & ATA_TFLAG_POLLING) in ata_hsm_ok_in_wq()
974 if (qc->tf.protocol == ATA_PROT_PIO && in ata_hsm_ok_in_wq()
975 (qc->tf.flags & ATA_TFLAG_WRITE)) in ata_hsm_ok_in_wq()
978 if (ata_is_atapi(qc->tf.protocol) && in ata_hsm_ok_in_wq()
1064 ap->print_id, qc->tf.protocol, ap->hsm_task_state, status); in ata_sff_hsm_move()
1074 poll_next = (qc->tf.flags & ATA_TFLAG_POLLING); in ata_sff_hsm_move()
1123 if (qc->tf.protocol == ATA_PROT_PIO) { in ata_sff_hsm_move()
1148 if (qc->tf.protocol == ATAPI_PROT_PIO) { in ata_sff_hsm_move()
1225 if (!(qc->tf.flags & ATA_TFLAG_WRITE)) { in ata_sff_hsm_move()
1258 (!(qc->tf.flags & ATA_TFLAG_WRITE))) { in ata_sff_hsm_move()
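
The ata_hsm_ok_in_wq() matches encode when the host state machine may be driven from the workqueue instead of interrupt context: always for polled commands, and in the first state for PIO writes and for ATAPI commands whose device does not interrupt for the CDB. A paraphrased sketch of that decision; the state and flag names are simplified stand-ins for the kernel's HSM_ST_* states and ATA_DFLAG_CDB_INTR.

    #include <stdbool.h>
    #include <stdio.h>

    enum hsm_state { HSM_ST_FIRST, HSM_ST, HSM_ST_LAST };
    enum protocol  { PROT_PIO, PROT_DMA, PROT_ATAPI_PIO, PROT_ATAPI_DMA };

    struct cmd {
        enum protocol protocol;
        bool polling, write, cdb_intr;   /* POLLING/WRITE tf flags, CDB-interrupt device flag */
    };

    /* Paraphrase of ata_hsm_ok_in_wq(): may this command's state machine be
     * driven from process (workqueue) context right now? */
    static bool hsm_ok_in_wq(enum hsm_state st, const struct cmd *qc)
    {
        if (qc->polling)
            return true;                          /* polled: never touched from IRQ */

        if (st == HSM_ST_FIRST) {
            if (qc->protocol == PROT_PIO && qc->write)
                return true;                      /* PIO write: first DRQ handled in wq */
            if ((qc->protocol == PROT_ATAPI_PIO ||
                 qc->protocol == PROT_ATAPI_DMA) && !qc->cdb_intr)
                return true;                      /* ATAPI: send CDB from wq unless the
                                                     device interrupts for it */
        }
        return false;
    }

    int main(void)
    {
        struct cmd pio_write = { .protocol = PROT_PIO, .write = true };
        printf("PIO write in HSM_ST_FIRST: %s\n",
               hsm_ok_in_wq(HSM_ST_FIRST, &pio_write) ? "wq ok" : "irq only");
        return 0;
    }
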
1417 qc->tf.flags |= ATA_TFLAG_POLLING; in ata_sff_qc_issue()
1423 switch (qc->tf.protocol) { in ata_sff_qc_issue()
1425 if (qc->tf.flags & ATA_TFLAG_POLLING) in ata_sff_qc_issue()
1428 ata_tf_to_host(ap, &qc->tf); in ata_sff_qc_issue()
1431 if (qc->tf.flags & ATA_TFLAG_POLLING) in ata_sff_qc_issue()
1437 if (qc->tf.flags & ATA_TFLAG_POLLING) in ata_sff_qc_issue()
1440 ata_tf_to_host(ap, &qc->tf); in ata_sff_qc_issue()
1442 if (qc->tf.flags & ATA_TFLAG_WRITE) { in ata_sff_qc_issue()
1454 if (qc->tf.flags & ATA_TFLAG_POLLING) in ata_sff_qc_issue()
1467 if (qc->tf.flags & ATA_TFLAG_POLLING) in ata_sff_qc_issue()
1470 ata_tf_to_host(ap, &qc->tf); in ata_sff_qc_issue()
1476 (qc->tf.flags & ATA_TFLAG_POLLING)) in ata_sff_qc_issue()
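
ata_sff_qc_issue() then dispatches on qc->tf.protocol: the taskfile reaches the device via ata_tf_to_host(), and the protocol plus the WRITE and POLLING flags pick the initial HSM state and whether the PIO task is queued (PIO writes always push the first data block from the task, ATAPI queues it when the CDB must be sent by polling). A condensed sketch of that dispatch; queue_pio_task() and the state names are paraphrases, not the real libata API.

    #include <stdbool.h>
    #include <stdio.h>

    enum protocol  { PROT_NODATA, PROT_PIO, PROT_ATAPI_PIO, PROT_ATAPI_NODATA };
    enum hsm_state { HSM_ST_FIRST, HSM_ST, HSM_ST_LAST };

    struct cmd {
        enum protocol protocol;
        bool polling, write, cdb_intr;
    };

    static void queue_pio_task(void) { printf("queue PIO task\n"); }
    static void tf_to_host(void)     { printf("load taskfile + command\n"); }

    /* Condensed paraphrase of the ata_sff_qc_issue() switch seen above. */
    static enum hsm_state qc_issue(const struct cmd *qc)
    {
        enum hsm_state st;

        tf_to_host();

        switch (qc->protocol) {
        case PROT_NODATA:
            st = HSM_ST_LAST;                 /* nothing to transfer, wait for completion */
            if (qc->polling)
                queue_pio_task();
            break;
        case PROT_PIO:
            if (qc->write) {
                st = HSM_ST_FIRST;            /* first data block always sent from the task */
                queue_pio_task();
            } else {
                st = HSM_ST;                  /* reads: IRQ handler takes over unless polling */
                if (qc->polling)
                    queue_pio_task();
            }
            break;
        default:                              /* ATAPI variants */
            st = HSM_ST_FIRST;
            if (!qc->cdb_intr || qc->polling)
                queue_pio_task();             /* send the CDB by polling if needed */
            break;
        }
        return st;
    }

    int main(void)
    {
        struct cmd pio_read = { .protocol = PROT_PIO };
        printf("initial HSM state: %d\n", qc_issue(&pio_read));
        return 0;
    }
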
1532 ap->print_id, qc->tf.protocol, ap->hsm_task_state); in __ata_sff_port_intr()
1613 if (!(qc->tf.flags & ATA_TFLAG_POLLING)) in __ata_sff_interrupt()
1706 if (!qc || qc->tf.flags & ATA_TFLAG_POLLING) in ata_sff_lost_interrupt()
1881 struct ata_taskfile tf; in ata_sff_dev_classify() local
1887 memset(&tf, 0, sizeof(tf)); in ata_sff_dev_classify()
1889 ap->ops->sff_tf_read(ap, &tf); in ata_sff_dev_classify()
1890 err = tf.feature; in ata_sff_dev_classify()
1906 class = ata_dev_classify(&tf); in ata_sff_dev_classify()
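
ata_sff_dev_classify() needs nothing more than the taskfile a reset leaves in the shadow registers: it zeroes a local struct ata_taskfile, fills it with sff_tf_read(), takes the error value from tf.feature, and lets ata_dev_classify() match the signature bytes. A sketch of that signature match using the conventional post-reset values; the byte checks come from the ATA/ATAPI signature convention rather than from the fragments above.

    #include <stdint.h>
    #include <stdio.h>

    struct taskfile { uint8_t feature, nsect, lbal, lbam, lbah; };

    enum dev_class { DEV_ATA, DEV_ATAPI, DEV_PMP, DEV_SEMB, DEV_UNKNOWN };

    /* Signature decode in the spirit of ata_dev_classify(): the lbam/lbah
     * bytes left in the shadow registers after reset identify the device. */
    static enum dev_class classify(const struct taskfile *tf)
    {
        if (tf->lbam == 0x00 && tf->lbah == 0x00)
            return DEV_ATA;
        if (tf->lbam == 0x14 && tf->lbah == 0xeb)
            return DEV_ATAPI;
        if (tf->lbam == 0x69 && tf->lbah == 0x96)
            return DEV_PMP;
        if (tf->lbam == 0x3c && tf->lbah == 0xc3)
            return DEV_SEMB;
        return DEV_UNKNOWN;
    }

    int main(void)
    {
        /* Pretend sff_tf_read() just filled this in after a bus reset. */
        struct taskfile tf = { .nsect = 1, .lbal = 1, .lbam = 0x14, .lbah = 0xeb };
        printf("error=0x%02x class=%d (1 == ATAPI)\n", tf.feature, classify(&tf));
        return 0;
    }
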
2795 if (!ata_is_dma(qc->tf.protocol)) in ata_bmdma_qc_issue()
2802 switch (qc->tf.protocol) { in ata_bmdma_qc_issue()
2804 WARN_ON_ONCE(qc->tf.flags & ATA_TFLAG_POLLING); in ata_bmdma_qc_issue()
2806 ap->ops->sff_tf_load(ap, &qc->tf); /* load tf registers */ in ata_bmdma_qc_issue()
2813 WARN_ON_ONCE(qc->tf.flags & ATA_TFLAG_POLLING); in ata_bmdma_qc_issue()
2815 ap->ops->sff_tf_load(ap, &qc->tf); /* load tf registers */ in ata_bmdma_qc_issue()
2853 if (ap->hsm_task_state == HSM_ST_LAST && ata_is_dma(qc->tf.protocol)) { in ata_bmdma_port_intr()
2875 if (unlikely(qc->err_mask) && ata_is_dma(qc->tf.protocol)) in ata_bmdma_port_intr()
2927 if (qc && ata_is_dma(qc->tf.protocol)) { in ata_bmdma_error_handler()
2973 if (ata_is_dma(qc->tf.protocol)) { in ata_bmdma_post_internal_cmd()
3013 unsigned int rw = (qc->tf.flags & ATA_TFLAG_WRITE); in ata_bmdma_setup()
3028 ap->ops->sff_exec_command(ap, &qc->tf); in ata_bmdma_setup()
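
For DMA protocols the taskfile is still loaded through sff_tf_load(), but ata_bmdma_setup() programs the bus-master engine before the command byte goes out: it derives the direction from ATA_TFLAG_WRITE, points the controller at the PRD table, clears the start bit, and only then calls sff_exec_command(); ata_bmdma_start() flips the start bit afterwards. A sketch of that ordering against a fake BMDMA register block; the field names are illustrative, and the direction-bit inversion (ATA_DMA_WR means the engine writes to system memory, i.e. an ATA read) follows the standard BMIDE convention rather than anything shown above.

    #include <stdint.h>
    #include <stdio.h>

    #define ATA_TFLAG_WRITE  (1 << 3)   /* taskfile flag: host-to-device data */
    #define ATA_DMA_WR       (1 << 3)   /* BMDMA command bit: engine writes to memory */
    #define ATA_DMA_START    (1 << 0)   /* BMDMA command bit: start the engine */

    /* Fake BMDMA register block; the real driver goes through ioaddr.bmdma_addr. */
    struct bmdma_regs {
        uint32_t prd_table;
        uint8_t  cmd;
    };

    struct cmd { unsigned int tf_flags; uint8_t tf_command; };

    /* Ordering as in ata_bmdma_setup(): PRD address, direction bit, then the
     * command byte; ata_bmdma_start() would set ATA_DMA_START afterwards. */
    static void bmdma_setup(struct bmdma_regs *bm, const struct cmd *qc,
                            uint32_t prd_dma)
    {
        unsigned int rw = qc->tf_flags & ATA_TFLAG_WRITE;

        bm->prd_table = prd_dma;

        bm->cmd &= ~(ATA_DMA_WR | ATA_DMA_START);
        if (!rw)
            bm->cmd |= ATA_DMA_WR;      /* ATA read => DMA engine writes to memory */

        printf("prd=0x%08x dmactl=0x%02x, now issuing cmd 0x%02x\n",
               (unsigned)bm->prd_table, (unsigned)bm->cmd, (unsigned)qc->tf_command);
    }

    int main(void)
    {
        struct bmdma_regs bm = { 0 };
        struct cmd read_dma = { .tf_flags = 0, .tf_command = 0xc8 }; /* READ DMA */
        bmdma_setup(&bm, &read_dma, 0x1000);
        return 0;
    }
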