Lines matching refs:cw (references to the struct dm_chunk_work pointer cw in the dm-zoned device-mapper target)
396 static void dmz_handle_bio(struct dmz_target *dmz, struct dm_chunk_work *cw, in dmz_handle_bio() argument
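All of these references operate on one per-chunk work descriptor. As a reading aid, here is a minimal sketch of the fields the lines below imply; the field names come from the cw-> accesses in this listing, while the exact types, field order, and any members not referenced here are assumptions:

	struct dm_chunk_work {
		struct work_struct	work;		/* handed to INIT_WORK()/queue_work() */
		refcount_t		refcount;	/* one reference per queued BIO, plus one while the work is queued */
		struct dmz_target	*target;	/* back-pointer used to reach chunk_rxtree and chunk_wq */
		unsigned int		chunk;		/* index into the chunk radix tree */
		struct bio_list		bio_list;	/* BIOs waiting to be processed for this chunk */
	};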
458 static inline void dmz_get_chunk_work(struct dm_chunk_work *cw) in dmz_get_chunk_work() argument
460 refcount_inc(&cw->refcount); in dmz_get_chunk_work()

467 static void dmz_put_chunk_work(struct dm_chunk_work *cw) in dmz_put_chunk_work() argument
469 if (refcount_dec_and_test(&cw->refcount)) { in dmz_put_chunk_work()
470 WARN_ON(!bio_list_empty(&cw->bio_list)); in dmz_put_chunk_work()
471 radix_tree_delete(&cw->target->chunk_rxtree, cw->chunk); in dmz_put_chunk_work()
472 kfree(cw); in dmz_put_chunk_work()
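The get/put pair above is plain reference counting on that descriptor: dmz_get_chunk_work() only bumps the count, and the final dmz_put_chunk_work() tears the work down. A sketch of the put path with its structure reconstructed (braces and comments added here; callers are assumed to serialize against the radix tree lookup/insert, which this listing does not show):

	static void dmz_put_chunk_work(struct dm_chunk_work *cw)
	{
		if (refcount_dec_and_test(&cw->refcount)) {
			/* Last reference: no BIO may still be pending for this chunk */
			WARN_ON(!bio_list_empty(&cw->bio_list));
			/* Make the chunk work unfindable before freeing it */
			radix_tree_delete(&cw->target->chunk_rxtree, cw->chunk);
			kfree(cw);
		}
	}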
481 struct dm_chunk_work *cw = container_of(work, struct dm_chunk_work, work); in dmz_chunk_work() local
482 struct dmz_target *dmz = cw->target; in dmz_chunk_work()
488 while ((bio = bio_list_pop(&cw->bio_list))) { in dmz_chunk_work()
490 dmz_handle_bio(dmz, cw, bio); in dmz_chunk_work()
492 dmz_put_chunk_work(cw); in dmz_chunk_work()
496 dmz_put_chunk_work(cw); in dmz_chunk_work()
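dmz_chunk_work() is the workqueue function: it drains the chunk's BIO list, hands each BIO to dmz_handle_bio(), and drops one reference per BIO, then drops the extra reference that was taken when the work itself was queued. A sketch of that drain loop (the locking around dmz_handle_bio() is not visible in this listing and is assumed):

	/* inside dmz_chunk_work(), after the container_of()/cw->target lines above */
	while ((bio = bio_list_pop(&cw->bio_list))) {
		dmz_handle_bio(dmz, cw, bio);
		dmz_put_chunk_work(cw);		/* reference taken when this BIO was queued */
	}
	dmz_put_chunk_work(cw);			/* reference taken when the work itself was queued */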
538 struct dm_chunk_work *cw; in dmz_queue_chunk_work() local
544 cw = radix_tree_lookup(&dmz->chunk_rxtree, chunk); in dmz_queue_chunk_work()
545 if (cw) { in dmz_queue_chunk_work()
546 dmz_get_chunk_work(cw); in dmz_queue_chunk_work()
549 cw = kmalloc(sizeof(struct dm_chunk_work), GFP_NOIO); in dmz_queue_chunk_work()
550 if (unlikely(!cw)) { in dmz_queue_chunk_work()
555 INIT_WORK(&cw->work, dmz_chunk_work); in dmz_queue_chunk_work()
556 refcount_set(&cw->refcount, 1); in dmz_queue_chunk_work()
557 cw->target = dmz; in dmz_queue_chunk_work()
558 cw->chunk = chunk; in dmz_queue_chunk_work()
559 bio_list_init(&cw->bio_list); in dmz_queue_chunk_work()
561 ret = radix_tree_insert(&dmz->chunk_rxtree, chunk, cw); in dmz_queue_chunk_work()
563 kfree(cw); in dmz_queue_chunk_work()
568 bio_list_add(&cw->bio_list, bio); in dmz_queue_chunk_work()
570 if (queue_work(dmz->chunk_wq, &cw->work)) in dmz_queue_chunk_work()
571 dmz_get_chunk_work(cw); in dmz_queue_chunk_work()
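dmz_queue_chunk_work() is the producer side: it looks the chunk up in dmz->chunk_rxtree and either takes another reference on an existing work or allocates, initializes, and inserts a new one, then adds the BIO to the work's list and queues the work. Because queue_work() returns false when the work is already pending, the extra "work is queued" reference is taken at most once. A sketch of that flow, with error handling and return values assumed (only the cw-touching lines appear in the listing above):

	cw = radix_tree_lookup(&dmz->chunk_rxtree, chunk);
	if (cw) {
		/* A work already exists for this chunk: one more reference for this BIO */
		dmz_get_chunk_work(cw);
	} else {
		cw = kmalloc(sizeof(struct dm_chunk_work), GFP_NOIO);
		if (unlikely(!cw))
			return -ENOMEM;			/* assumed error path */

		INIT_WORK(&cw->work, dmz_chunk_work);
		refcount_set(&cw->refcount, 1);		/* reference for this first BIO */
		cw->target = dmz;
		cw->chunk = chunk;
		bio_list_init(&cw->bio_list);

		ret = radix_tree_insert(&dmz->chunk_rxtree, chunk, cw);
		if (unlikely(ret)) {
			kfree(cw);
			return ret;			/* assumed error path */
		}
	}

	bio_list_add(&cw->bio_list, bio);

	/* Take the "work is queued" reference only if the work was not already pending */
	if (queue_work(dmz->chunk_wq, &cw->work))
		dmz_get_chunk_work(cw);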