Searched refs:chunk_mask (Results 1 – 9 of 9) sorted by relevance
153 store->chunk_size = store->chunk_mask = store->chunk_shift = 0; in set_chunk_size()
185 store->chunk_mask = chunk_size - 1; in dm_exception_store_set_chunk_size()
56 sector_t chunk_mask; member
579 sector = r10bio->sector & geo->chunk_mask; in __raid10_find_phys()
619 sector += (geo->chunk_mask + 1); in __raid10_find_phys()
661 offset = sector & geo->chunk_mask; in raid10_find_virt()
1524 sector_t chunk_mask = (conf->geo.chunk_mask & conf->prev.chunk_mask); in raid10_make_request() local
1525 int chunk_sects = chunk_mask + 1; in raid10_make_request()
1539 if (unlikely((bio->bi_iter.bi_sector & chunk_mask) + in raid10_make_request()
2913 sector_t chunk_mask = conf->geo.chunk_mask; in raid10_sync_request() local
3012 max_sector > (sector_nr | chunk_mask)) in raid10_sync_request()
3013 max_sector = (sector_nr | chunk_mask) + 1; in raid10_sync_request()
3332 r10_bio->sectors = (sector_nr | chunk_mask) - sector_nr + 1; in raid10_sync_request()
[all …]
122 unsigned chunk_mask; member
326 ps->store->chunk_mask = ps->store->chunk_size - 1; in read_header()
1907 (bio->bi_iter.bi_sector & s->store->chunk_mask); in remap_exception()
48 xsk_umem_fq_reuse(rq->umem, handle & rq->umem->chunk_mask); in mlx5e_xsk_recycle_frame()
297 handle &= rx_ring->xsk_umem->chunk_mask; in i40e_alloc_buffer_slow_zc()
456 mask = rx_ring->xsk_umem->chunk_mask; in i40e_zca_free()
235 mask = rx_ring->xsk_umem->chunk_mask; in ixgbe_zca_free()
295 handle &= rx_ring->xsk_umem->chunk_mask; in ixgbe_alloc_buffer_slow_zc()