Searched refs:bv (Results 1 – 7 of 7) sorted by relevance
/block/
blk-merge.c
    194  const struct bio_vec *bv, unsigned *nsegs,   in bvec_split_segs()  argument
    199  unsigned len = min(bv->bv_len, max_len);   in bvec_split_segs()
    204  seg_size = get_max_segment_size(q, bv->bv_offset + total_len);   in bvec_split_segs()
    211  if ((bv->bv_offset + total_len) & queue_virt_boundary(q))   in bvec_split_segs()
    218  return len > 0 || bv->bv_len > max_len;   in bvec_split_segs()
    245  struct bio_vec bv, bvprv, *bvprvp = NULL;   in blk_bio_segment_split()  local
    251  bio_for_each_bvec(bv, bio, iter) {   in blk_bio_segment_split()
    256  if (bvprvp && bvec_gap_to_prev(q, bvprvp, bv.bv_offset))   in blk_bio_segment_split()
    260  sectors + (bv.bv_len >> 9) <= max_sectors &&   in blk_bio_segment_split()
    261  bv.bv_offset + bv.bv_len <= PAGE_SIZE) {   in blk_bio_segment_split()
    [all …]
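For context on the iteration used by blk_bio_segment_split() above: bio_for_each_bvec() walks the multi-page bvecs of a bio, so each bv may cover more than one page. The helper below is only an illustrative sketch (bio_payload_bytes() is not a kernel function); it sums bv.bv_len across the iteration the same way the split code accumulates sector counts with bv.bv_len >> 9.

    #include <linux/bio.h>

    /* Illustrative sketch only: total payload of a bio, summed over its
     * multi-page bvecs with bio_for_each_bvec(). */
    static unsigned int bio_payload_bytes(struct bio *bio)
    {
    	struct bio_vec bv;
    	struct bvec_iter iter;
    	unsigned int bytes = 0;

    	bio_for_each_bvec(bv, bio, iter)
    		bytes += bv.bv_len;	/* one bv may span several pages */

    	return bytes;
    }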
bio.c
    153  void bvec_free(mempool_t *pool, struct bio_vec *bv, unsigned int idx)   in bvec_free()  argument
    162  mempool_free(bv, pool);   in bvec_free()
    166  kmem_cache_free(bvs->slab, bv);   in bvec_free()
    532  struct bio_vec bv;   in zero_fill_bio_iter()  local
    535  __bio_for_each_segment(bv, bio, iter, start) {   in zero_fill_bio_iter()
    536  char *data = bvec_kmap_irq(&bv, &flags);   in zero_fill_bio_iter()
    537  memset(data, 0, bv.bv_len);   in zero_fill_bio_iter()
    538  flush_dcache_page(bv.bv_page);   in zero_fill_bio_iter()
    556  struct bio_vec bv;   in bio_truncate()  local
    567  bio_for_each_segment(bv, bio, iter) {   in bio_truncate()
    [all …]
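zero_fill_bio_iter() above shows the usual pattern for touching the data behind each single-page segment: iterate with bio_for_each_segment() (or the __ variant with an explicit start), map the page with bvec_kmap_irq(), operate on bv.bv_len bytes, flush, then unmap. A minimal sketch of that pattern, assuming this kernel era's bvec_kmap_irq()/bvec_kunmap_irq() helpers; fill_bio_with_pattern() is an invented name, not part of bio.c:

    #include <linux/bio.h>
    #include <linux/highmem.h>

    /* Illustrative sketch only: write a byte pattern into every segment. */
    static void fill_bio_with_pattern(struct bio *bio, u8 pattern)
    {
    	struct bio_vec bv;
    	struct bvec_iter iter;
    	unsigned long flags;

    	bio_for_each_segment(bv, bio, iter) {
    		char *data = bvec_kmap_irq(&bv, &flags);

    		memset(data, pattern, bv.bv_len);
    		flush_dcache_page(bv.bv_page);
    		bvec_kunmap_irq(data, &flags);
    	}
    }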
blk-crypto-fallback.c
    171  struct bio_vec bv;   in blk_crypto_clone_bio()  local
    184  bio_for_each_segment(bv, bio_src, iter)   in blk_crypto_clone_bio()
    185  bio->bi_io_vec[bio->bi_vcnt++] = bv;   in blk_crypto_clone_bio()
    229  struct bio_vec bv;   in blk_crypto_split_bio_if_needed()  local
    232  bio_for_each_segment(bv, bio, iter) {   in blk_crypto_split_bio_if_needed()
    233  num_sectors += bv.bv_len >> SECTOR_SHIFT;   in blk_crypto_split_bio_if_needed()
    403  struct bio_vec bv;   in blk_crypto_decrypt_bio()  local
    435  __bio_for_each_segment(bv, bio, iter, f_ctx->crypt_iter) {   in blk_crypto_decrypt_bio()
    436  struct page *page = bv.bv_page;   in blk_crypto_decrypt_bio()
    438  sg_set_page(&sg, page, data_unit_size, bv.bv_offset);   in blk_crypto_decrypt_bio()
    [all …]
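blk_crypto_clone_bio() and bounce_clone_bio() (see the bounce.c result further down) share the same cloning idiom: copy each source segment into the clone's bi_io_vec while bumping bi_vcnt. A hedged sketch of just that copy loop, assuming the clone was already allocated with at least bio_segments(bio_src) free bvec slots; copy_segments_to_clone() is an invented helper name:

    #include <linux/bio.h>

    /* Illustrative sketch only: duplicate the source bio's segment vector. */
    static void copy_segments_to_clone(struct bio *bio, struct bio *bio_src)
    {
    	struct bio_vec bv;
    	struct bvec_iter iter;

    	bio_for_each_segment(bv, bio_src, iter)
    		bio->bi_io_vec[bio->bi_vcnt++] = bv;

    	/* start the clone at the same position and size as the source */
    	bio->bi_iter.bi_sector = bio_src->bi_iter.bi_sector;
    	bio->bi_iter.bi_size   = bio_src->bi_iter.bi_size;
    }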
blk-crypto.c
     44  struct bio_vec bv;   in bio_crypt_check_alignment()  local
     46  bio_for_each_segment(bv, bio, iter) {   in bio_crypt_check_alignment()
     47  if (!IS_ALIGNED(bv.bv_len | bv.bv_offset, data_unit_size))   in bio_crypt_check_alignment()
bio-integrity.c
    159  struct bio_vec bv;   in bio_integrity_process()  local
    170  __bio_for_each_segment(bv, bio, bviter, *proc_iter) {   in bio_integrity_process()
    171  void *kaddr = kmap_atomic(bv.bv_page);   in bio_integrity_process()
    173  iter.data_buf = kaddr + bv.bv_offset;   in bio_integrity_process()
    174  iter.data_size = bv.bv_len;   in bio_integrity_process()
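bio_integrity_process() iterates from a saved position: __bio_for_each_segment() takes an explicit struct bvec_iter start, so the walk can resume wherever a previous pass stopped, and each data page is mapped briefly with kmap_atomic()/kunmap_atomic(). A hedged sketch of that pattern; checksum_bio_from() and its trivial additive checksum are invented for illustration:

    #include <linux/bio.h>
    #include <linux/highmem.h>

    /* Illustrative sketch only: byte-sum the data from a saved iterator on. */
    static u32 checksum_bio_from(struct bio *bio, struct bvec_iter start)
    {
    	struct bio_vec bv;
    	struct bvec_iter iter;
    	u32 sum = 0;

    	__bio_for_each_segment(bv, bio, iter, start) {
    		u8 *kaddr = kmap_atomic(bv.bv_page);
    		unsigned int i;

    		for (i = 0; i < bv.bv_len; i++)
    			sum += kaddr[bv.bv_offset + i];
    		kunmap_atomic(kaddr);
    	}
    	return sum;
    }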
bounce.c
    221  struct bio_vec bv;   in bounce_clone_bio()  local
    265  bio_for_each_segment(bv, bio_src, iter)   in bounce_clone_bio()
    266  bio->bi_io_vec[bio->bi_vcnt++] = bv;   in bounce_clone_bio()
blk-map.c
     22  struct bio_vec bv;   in blk_rq_append_bio()  local
     27  bio_for_each_bvec(bv, *bio, iter)   in blk_rq_append_bio()