Lines Matching refs: io_req
In dp_init() (io_req is a function argument):

  465  static int dp_init(struct dm_io_request *io_req, struct dpages *dp,
  473          switch (io_req->mem.type) {
  475                  list_dp_init(dp, io_req->mem.ptr.pl, io_req->mem.offset);
  479                  bio_dp_init(dp, io_req->mem.ptr.bio);
  483                  flush_kernel_vmap_range(io_req->mem.ptr.vma, size);
  484                  if ((io_req->bi_rw & RW_MASK) == READ) {
  485                          dp->vma_invalidate_address = io_req->mem.ptr.vma;
  488                  vm_dp_init(dp, io_req->mem.ptr.vma);
  492                  km_dp_init(dp, io_req->mem.ptr.addr);
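For readability, the matched lines above fit into dp_init() roughly as sketched below (the function lives in drivers/md/dm-io.c in kernels where struct dm_io_request still has bi_rw). The case labels come from enum dm_io_mem_type in include/linux/dm-io.h; the break statements, the vma_invalidate_size handling and the default error path are reconstructed here and are not part of the matched lines, so treat this as a sketch rather than a verbatim copy of the source.

static int dp_init(struct dm_io_request *io_req, struct dpages *dp,
                   unsigned long size)
{
        /* Only a DM_IO_VMA read needs a deferred cache invalidation. */
        dp->vma_invalidate_address = NULL;
        dp->vma_invalidate_size = 0;

        switch (io_req->mem.type) {
        case DM_IO_PAGE_LIST:                   /* struct page_list chain */
                list_dp_init(dp, io_req->mem.ptr.pl, io_req->mem.offset);
                break;

        case DM_IO_BIO:                         /* pages of an existing bio */
                bio_dp_init(dp, io_req->mem.ptr.bio);
                break;

        case DM_IO_VMA:                         /* vmalloc'd memory */
                flush_kernel_vmap_range(io_req->mem.ptr.vma, size);
                if ((io_req->bi_rw & RW_MASK) == READ) {
                        /* remember what to invalidate once the read completes */
                        dp->vma_invalidate_address = io_req->mem.ptr.vma;
                        dp->vma_invalidate_size = size;
                }
                vm_dp_init(dp, io_req->mem.ptr.vma);
                break;

        case DM_IO_KMEM:                        /* plain kernel memory */
                km_dp_init(dp, io_req->mem.ptr.addr);
                break;

        default:
                return -EINVAL;
        }

        return 0;
}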
In dm_io() (io_req is a function argument):

  510  int dm_io(struct dm_io_request *io_req, unsigned num_regions,
  516          r = dp_init(io_req, &dp, (unsigned long)where->count << SECTOR_SHIFT);
  520          if (!io_req->notify.fn)
  521                  return sync_io(io_req->client, num_regions, where,
  522                                 io_req->bi_rw, &dp, sync_error_bits);
  524          return async_io(io_req->client, num_regions, where, io_req->bi_rw,
  525                          &dp, io_req->notify.fn, io_req->notify.context);
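From a caller's point of view the two branches above mean: leave notify.fn NULL and dm_io() takes the synchronous sync_io() path, or supply notify.fn/notify.context and the request goes through async_io() and returns immediately. The helper below is a hedged sketch of the usual synchronous calling convention; example_read_block() is a hypothetical name, the dm_io_client is assumed to have been created elsewhere (e.g. with dm_io_client_create()), and the field names follow include/linux/dm-io.h for kernels where struct dm_io_request still carries bi_rw.

#include <linux/blkdev.h>
#include <linux/dm-io.h>

/* Hypothetical helper, not part of the listing above: synchronous read
 * of `count` sectors from bdev into a plain kernel-memory buffer. */
static int example_read_block(struct dm_io_client *client,
                              struct block_device *bdev,
                              sector_t sector, void *data, sector_t count)
{
        unsigned long error_bits = 0;
        int r;
        struct dm_io_region where = {
                .bdev   = bdev,
                .sector = sector,
                .count  = count,                /* length in 512-byte sectors */
        };
        struct dm_io_request io_req = {
                .bi_rw        = READ,
                .mem.type     = DM_IO_KMEM,     /* plain kernel memory buffer */
                .mem.ptr.addr = data,
                .notify.fn    = NULL,           /* NULL => synchronous sync_io() path */
                .client       = client,
        };

        /* Blocks until every region has completed. */
        r = dm_io(&io_req, 1, &where, &error_bits);
        if (r)
                return r;
        return error_bits ? -EIO : 0;
}

sync_error_bits comes back with one bit set per region that failed, so synchronous callers typically check both the return value and the bitmask, as above.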