Lines Matching refs: acb
508 QCowAIOCB *acb = (QCowAIOCB *)blockacb; in qcow_aio_cancel() local
509 if (acb->hd_aiocb) in qcow_aio_cancel()
510 bdrv_aio_cancel(acb->hd_aiocb); in qcow_aio_cancel()
511 qemu_aio_release(acb); in qcow_aio_cancel()
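
Taken together, the matched lines cover the whole cancel path. A minimal sketch of the full handler, assuming the standard BlockDriverAIOCB prototype this file's other entry points use (only the matched lines are certain; the signature is inferred):

    /* Sketch: signature inferred from the block-driver AIO conventions here. */
    static void qcow_aio_cancel(BlockDriverAIOCB *blockacb)
    {
        QCowAIOCB *acb = (QCowAIOCB *)blockacb;

        /* Cancel any request still in flight against the host image,
         * then hand the wrapper AIOCB back to its pool. */
        if (acb->hd_aiocb)
            bdrv_aio_cancel(acb->hd_aiocb);
        qemu_aio_release(acb);
    }
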
523 QCowAIOCB *acb; in qcow_aio_setup() local
525 acb = qemu_aio_get(&qcow_aio_pool, bs, cb, opaque); in qcow_aio_setup()
526 if (!acb) in qcow_aio_setup()
528 acb->hd_aiocb = NULL; in qcow_aio_setup()
529 acb->sector_num = sector_num; in qcow_aio_setup()
530 acb->qiov = qiov; in qcow_aio_setup()
532 acb->buf = acb->orig_buf = qemu_blockalign(bs, qiov->size); in qcow_aio_setup()
534 qemu_iovec_to_buffer(qiov, acb->buf); in qcow_aio_setup()
536 acb->buf = (uint8_t *)qiov->iov->iov_base; in qcow_aio_setup()
538 acb->nb_sectors = nb_sectors; in qcow_aio_setup()
539 acb->n = 0; in qcow_aio_setup()
540 acb->cluster_offset = 0; in qcow_aio_setup()
541 return acb; in qcow_aio_setup()
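
The match filter hides lines 524, 527, and 531-537. Filling those gaps, a hedged reconstruction of the setup helper follows; the qiov->niov > 1 bounce-buffer test and the is_write guard around qemu_iovec_to_buffer are assumptions inferred from lines 532-536, and the prototype is likewise inferred:

    /* Sketch: elided lines reconstructed; branch conditions are assumptions. */
    static QCowAIOCB *qcow_aio_setup(BlockDriverState *bs, int64_t sector_num,
                                     QEMUIOVector *qiov, int nb_sectors,
                                     BlockDriverCompletionFunc *cb, void *opaque,
                                     int is_write)
    {
        QCowAIOCB *acb;

        acb = qemu_aio_get(&qcow_aio_pool, bs, cb, opaque);
        if (!acb)
            return NULL;
        acb->hd_aiocb = NULL;
        acb->sector_num = sector_num;
        acb->qiov = qiov;
        if (qiov->niov > 1) {
            /* Multiple iovecs: stage through one linear bounce buffer,
             * gathered up front for writes. */
            acb->buf = acb->orig_buf = qemu_blockalign(bs, qiov->size);
            if (is_write)
                qemu_iovec_to_buffer(qiov, acb->buf);
        } else {
            /* Single iovec: operate on the caller's buffer directly. */
            acb->buf = (uint8_t *)qiov->iov->iov_base;
        }
        acb->nb_sectors = nb_sectors;
        acb->n = 0;
        acb->cluster_offset = 0;
        return acb;
    }

Note the initialization acb->n = 0: acb->n holds the sector count of the chunk currently in flight, so starting it at zero lets the first callback invocation advance by nothing and fall straight through to issuing the first cluster-sized piece.
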
546 QCowAIOCB *acb = opaque; in qcow_aio_read_cb() local
547 BlockDriverState *bs = acb->common.bs; in qcow_aio_read_cb()
551 acb->hd_aiocb = NULL; in qcow_aio_read_cb()
557 if (!acb->cluster_offset) { in qcow_aio_read_cb()
559 } else if (acb->cluster_offset & QCOW_OFLAG_COMPRESSED) { in qcow_aio_read_cb()
563 encrypt_sectors(s, acb->sector_num, acb->buf, acb->buf, in qcow_aio_read_cb()
564 acb->n, 0, in qcow_aio_read_cb()
569 acb->nb_sectors -= acb->n; in qcow_aio_read_cb()
570 acb->sector_num += acb->n; in qcow_aio_read_cb()
571 acb->buf += acb->n * 512; in qcow_aio_read_cb()
573 if (acb->nb_sectors == 0) { in qcow_aio_read_cb()
580 acb->cluster_offset = get_cluster_offset(bs, acb->sector_num << 9, in qcow_aio_read_cb()
582 index_in_cluster = acb->sector_num & (s->cluster_sectors - 1); in qcow_aio_read_cb()
583 acb->n = s->cluster_sectors - index_in_cluster; in qcow_aio_read_cb()
584 if (acb->n > acb->nb_sectors) in qcow_aio_read_cb()
585 acb->n = acb->nb_sectors; in qcow_aio_read_cb()
587 if (!acb->cluster_offset) { in qcow_aio_read_cb()
590 acb->hd_iov.iov_base = (void *)acb->buf; in qcow_aio_read_cb()
591 acb->hd_iov.iov_len = acb->n * 512; in qcow_aio_read_cb()
592 qemu_iovec_init_external(&acb->hd_qiov, &acb->hd_iov, 1); in qcow_aio_read_cb()
593 acb->hd_aiocb = bdrv_aio_readv(bs->backing_hd, acb->sector_num, in qcow_aio_read_cb()
594 &acb->hd_qiov, acb->n, qcow_aio_read_cb, acb); in qcow_aio_read_cb()
595 if (acb->hd_aiocb == NULL) in qcow_aio_read_cb()
599 memset(acb->buf, 0, 512 * acb->n); in qcow_aio_read_cb()
602 } else if (acb->cluster_offset & QCOW_OFLAG_COMPRESSED) { in qcow_aio_read_cb()
604 if (decompress_cluster(s, acb->cluster_offset) < 0) in qcow_aio_read_cb()
606 memcpy(acb->buf, in qcow_aio_read_cb()
607 s->cluster_cache + index_in_cluster * 512, 512 * acb->n); in qcow_aio_read_cb()
610 if ((acb->cluster_offset & 511) != 0) { in qcow_aio_read_cb()
614 acb->hd_iov.iov_base = (void *)acb->buf; in qcow_aio_read_cb()
615 acb->hd_iov.iov_len = acb->n * 512; in qcow_aio_read_cb()
616 qemu_iovec_init_external(&acb->hd_qiov, &acb->hd_iov, 1); in qcow_aio_read_cb()
617 acb->hd_aiocb = bdrv_aio_readv(s->hd, in qcow_aio_read_cb()
618 (acb->cluster_offset >> 9) + index_in_cluster, in qcow_aio_read_cb()
619 &acb->hd_qiov, acb->n, qcow_aio_read_cb, acb); in qcow_aio_read_cb()
620 if (acb->hd_aiocb == NULL) in qcow_aio_read_cb()
627 if (acb->qiov->niov > 1) { in qcow_aio_read_cb()
628 qemu_iovec_from_buffer(acb->qiov, acb->orig_buf, acb->qiov->size); in qcow_aio_read_cb()
629 qemu_vfree(acb->orig_buf); in qcow_aio_read_cb()
631 acb->common.cb(acb->common.opaque, ret); in qcow_aio_read_cb()
632 qemu_aio_release(acb); in qcow_aio_read_cb()
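
The read callback is a self-rescheduling state machine: each invocation post-processes the chunk that just completed, advances the cursor, and either finishes or issues the next cluster-clipped request. Reconstructed as one unit below; the redo/done labels, the s->crypt_method and bs->backing_hd guards, and the -EIO error code are assumptions filling the unmatched lines:

    /* Sketch: gaps between matched lines filled in; labels, guards and
     * error codes are assumptions, the rest follows the matches. */
    static void qcow_aio_read_cb(void *opaque, int ret)
    {
        QCowAIOCB *acb = opaque;
        BlockDriverState *bs = acb->common.bs;
        BDRVQcowState *s = bs->opaque;
        int index_in_cluster;

        acb->hd_aiocb = NULL;
        if (ret < 0)
            goto done;

    redo:
        /* Post-process the chunk that just completed. */
        if (!acb->cluster_offset) {
            /* Came from the backing file or memset: nothing to do. */
        } else if (acb->cluster_offset & QCOW_OFLAG_COMPRESSED) {
            /* Already decompressed into place: nothing to do. */
        } else {
            if (s->crypt_method) {
                /* Decrypt in place (enc = 0 selects decryption). */
                encrypt_sectors(s, acb->sector_num, acb->buf, acb->buf,
                                acb->n, 0,
                                &s->aes_decrypt_key);
            }
        }

        /* Advance the cursor past the finished chunk. */
        acb->nb_sectors -= acb->n;
        acb->sector_num += acb->n;
        acb->buf += acb->n * 512;

        if (acb->nb_sectors == 0) {
            ret = 0;                /* whole request done */
            goto done;
        }

        /* Prepare the next chunk, clipped to the current cluster. */
        acb->cluster_offset = get_cluster_offset(bs, acb->sector_num << 9,
                                                 0, 0, 0, 0);
        index_in_cluster = acb->sector_num & (s->cluster_sectors - 1);
        acb->n = s->cluster_sectors - index_in_cluster;
        if (acb->n > acb->nb_sectors)
            acb->n = acb->nb_sectors;

        if (!acb->cluster_offset) {
            if (bs->backing_hd) {
                /* Unallocated cluster: read through to the backing image. */
                acb->hd_iov.iov_base = (void *)acb->buf;
                acb->hd_iov.iov_len = acb->n * 512;
                qemu_iovec_init_external(&acb->hd_qiov, &acb->hd_iov, 1);
                acb->hd_aiocb = bdrv_aio_readv(bs->backing_hd, acb->sector_num,
                    &acb->hd_qiov, acb->n, qcow_aio_read_cb, acb);
                if (acb->hd_aiocb == NULL)
                    goto done;
            } else {
                /* No backing image: the cluster reads as zeros, no I/O to wait for. */
                memset(acb->buf, 0, 512 * acb->n);
                goto redo;
            }
        } else if (acb->cluster_offset & QCOW_OFLAG_COMPRESSED) {
            /* Decompress synchronously through the one-cluster cache. */
            if (decompress_cluster(s, acb->cluster_offset) < 0)
                goto done;
            memcpy(acb->buf,
                   s->cluster_cache + index_in_cluster * 512, 512 * acb->n);
            goto redo;
        } else {
            if ((acb->cluster_offset & 511) != 0) {
                ret = -EIO;         /* corrupt image: unaligned cluster */
                goto done;
            }
            acb->hd_iov.iov_base = (void *)acb->buf;
            acb->hd_iov.iov_len = acb->n * 512;
            qemu_iovec_init_external(&acb->hd_qiov, &acb->hd_iov, 1);
            acb->hd_aiocb = bdrv_aio_readv(s->hd,
                (acb->cluster_offset >> 9) + index_in_cluster,
                &acb->hd_qiov, acb->n, qcow_aio_read_cb, acb);
            if (acb->hd_aiocb == NULL)
                goto done;
        }
        return;

    done:
        /* Scatter the bounce buffer back out and complete the request. */
        if (acb->qiov->niov > 1) {
            qemu_iovec_from_buffer(acb->qiov, acb->orig_buf, acb->qiov->size);
            qemu_vfree(acb->orig_buf);
        }
        acb->common.cb(acb->common.opaque, ret);
        qemu_aio_release(acb);
    }
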
639 QCowAIOCB *acb; in qcow_aio_readv() local
641 acb = qcow_aio_setup(bs, sector_num, qiov, nb_sectors, cb, opaque, 0); in qcow_aio_readv()
642 if (!acb) in qcow_aio_readv()
645 qcow_aio_read_cb(acb, 0); in qcow_aio_readv()
646 return &acb->common; in qcow_aio_readv()
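
The read entry point just builds the AIOCB (is_write == 0, so no bounce-buffer gather) and calls the callback once by hand to start the chain. A sketch with the elided prototype and NULL check filled in:

    /* Sketch: prototype inferred; the body follows the matched lines. */
    static BlockDriverAIOCB *qcow_aio_readv(BlockDriverState *bs,
            int64_t sector_num, QEMUIOVector *qiov, int nb_sectors,
            BlockDriverCompletionFunc *cb, void *opaque)
    {
        QCowAIOCB *acb;

        acb = qcow_aio_setup(bs, sector_num, qiov, nb_sectors, cb, opaque, 0);
        if (!acb)
            return NULL;

        /* Seed the state machine with a synthetic success; the callback
         * issues the first cluster-sized read itself. */
        qcow_aio_read_cb(acb, 0);
        return &acb->common;
    }
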
651 QCowAIOCB *acb = opaque; in qcow_aio_write_cb() local
652 BlockDriverState *bs = acb->common.bs; in qcow_aio_write_cb()
658 acb->hd_aiocb = NULL; in qcow_aio_write_cb()
663 acb->nb_sectors -= acb->n; in qcow_aio_write_cb()
664 acb->sector_num += acb->n; in qcow_aio_write_cb()
665 acb->buf += acb->n * 512; in qcow_aio_write_cb()
667 if (acb->nb_sectors == 0) { in qcow_aio_write_cb()
673 index_in_cluster = acb->sector_num & (s->cluster_sectors - 1); in qcow_aio_write_cb()
674 acb->n = s->cluster_sectors - index_in_cluster; in qcow_aio_write_cb()
675 if (acb->n > acb->nb_sectors) in qcow_aio_write_cb()
676 acb->n = acb->nb_sectors; in qcow_aio_write_cb()
677 cluster_offset = get_cluster_offset(bs, acb->sector_num << 9, 1, 0, in qcow_aio_write_cb()
679 index_in_cluster + acb->n); in qcow_aio_write_cb()
685 if (!acb->cluster_data) { in qcow_aio_write_cb()
686 acb->cluster_data = qemu_mallocz(s->cluster_size); in qcow_aio_write_cb()
687 if (!acb->cluster_data) { in qcow_aio_write_cb()
692 encrypt_sectors(s, acb->sector_num, acb->cluster_data, acb->buf, in qcow_aio_write_cb()
693 acb->n, 1, &s->aes_encrypt_key); in qcow_aio_write_cb()
694 src_buf = acb->cluster_data; in qcow_aio_write_cb()
696 src_buf = acb->buf; in qcow_aio_write_cb()
699 acb->hd_iov.iov_base = (void *)src_buf; in qcow_aio_write_cb()
700 acb->hd_iov.iov_len = acb->n * 512; in qcow_aio_write_cb()
701 qemu_iovec_init_external(&acb->hd_qiov, &acb->hd_iov, 1); in qcow_aio_write_cb()
702 acb->hd_aiocb = bdrv_aio_writev(s->hd, in qcow_aio_write_cb()
704 &acb->hd_qiov, acb->n, in qcow_aio_write_cb()
705 qcow_aio_write_cb, acb); in qcow_aio_write_cb()
706 if (acb->hd_aiocb == NULL) in qcow_aio_write_cb()
711 if (acb->qiov->niov > 1) in qcow_aio_write_cb()
712 qemu_vfree(acb->orig_buf); in qcow_aio_write_cb()
713 acb->common.cb(acb->common.opaque, ret); in qcow_aio_write_cb()
714 qemu_aio_release(acb); in qcow_aio_write_cb()
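
The write callback mirrors the read side but is simpler: get_cluster_offset is called with allocate = 1, so every chunk lands in a real cluster and goes through a single bdrv_aio_writev. Reconstructed below; the done label, the validity and s->crypt_method guards, the -EIO/-ENOMEM codes, and the elided sector-offset expression (inferred by analogy with the read path) are assumptions:

    /* Sketch: gaps between matched lines filled in; guards and error
     * codes are assumptions, the rest follows the matches. */
    static void qcow_aio_write_cb(void *opaque, int ret)
    {
        QCowAIOCB *acb = opaque;
        BlockDriverState *bs = acb->common.bs;
        BDRVQcowState *s = bs->opaque;
        int index_in_cluster;
        uint64_t cluster_offset;
        const uint8_t *src_buf;

        acb->hd_aiocb = NULL;
        if (ret < 0)
            goto done;

        /* Advance past the chunk that just completed. */
        acb->nb_sectors -= acb->n;
        acb->sector_num += acb->n;
        acb->buf += acb->n * 512;

        if (acb->nb_sectors == 0) {
            ret = 0;                /* whole request done */
            goto done;
        }

        /* Clip the next chunk to the current cluster and allocate it. */
        index_in_cluster = acb->sector_num & (s->cluster_sectors - 1);
        acb->n = s->cluster_sectors - index_in_cluster;
        if (acb->n > acb->nb_sectors)
            acb->n = acb->nb_sectors;
        cluster_offset = get_cluster_offset(bs, acb->sector_num << 9, 1, 0,
                                            index_in_cluster,
                                            index_in_cluster + acb->n);
        if (!cluster_offset || (cluster_offset & 511) != 0) {
            ret = -EIO;             /* allocation failed or image corrupt */
            goto done;
        }
        if (s->crypt_method) {
            /* Encrypt into a lazily allocated per-request staging buffer. */
            if (!acb->cluster_data) {
                acb->cluster_data = qemu_mallocz(s->cluster_size);
                if (!acb->cluster_data) {
                    ret = -ENOMEM;
                    goto done;
                }
            }
            encrypt_sectors(s, acb->sector_num, acb->cluster_data, acb->buf,
                            acb->n, 1, &s->aes_encrypt_key);
            src_buf = acb->cluster_data;
        } else {
            src_buf = acb->buf;
        }

        acb->hd_iov.iov_base = (void *)src_buf;
        acb->hd_iov.iov_len = acb->n * 512;
        qemu_iovec_init_external(&acb->hd_qiov, &acb->hd_iov, 1);
        acb->hd_aiocb = bdrv_aio_writev(s->hd,
                                        (cluster_offset >> 9) + index_in_cluster,
                                        &acb->hd_qiov, acb->n,
                                        qcow_aio_write_cb, acb);
        if (acb->hd_aiocb == NULL)
            goto done;
        return;

    done:
        /* The gather already happened in setup, so only free the bounce buffer. */
        if (acb->qiov->niov > 1)
            qemu_vfree(acb->orig_buf);
        acb->common.cb(acb->common.opaque, ret);
        qemu_aio_release(acb);
    }
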
722 QCowAIOCB *acb; in qcow_aio_writev() local
726 acb = qcow_aio_setup(bs, sector_num, qiov, nb_sectors, cb, opaque, 1); in qcow_aio_writev()
727 if (!acb) in qcow_aio_writev()
731 qcow_aio_write_cb(acb, 0); in qcow_aio_writev()
732 return &acb->common; in qcow_aio_writev()
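
The write entry point parallels qcow_aio_readv but passes is_write == 1, so multi-iovec payloads are gathered into the bounce buffer up front (the qemu_iovec_to_buffer call at line 534). In the full source this function also invalidates the compressed-cluster read cache before writing; that line is not among the matches and is reproduced here as an assumption:

    /* Sketch: prototype and cache invalidation are assumptions; the rest
     * follows the matched lines. */
    static BlockDriverAIOCB *qcow_aio_writev(BlockDriverState *bs,
            int64_t sector_num, QEMUIOVector *qiov, int nb_sectors,
            BlockDriverCompletionFunc *cb, void *opaque)
    {
        BDRVQcowState *s = bs->opaque;
        QCowAIOCB *acb;

        s->cluster_cache_offset = -1;   /* assumed: a stale decompressed cluster
                                         * must not serve reads after this write */

        acb = qcow_aio_setup(bs, sector_num, qiov, nb_sectors, cb, opaque, 1);
        if (!acb)
            return NULL;

        /* Kick the state machine; the callback issues the first write. */
        qcow_aio_write_cb(acb, 0);
        return &acb->common;
    }
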