lib/sbitmap.c: lines matching "depth"
3 * Copyright (C) 2013-2014 Jens Axboe
23 int sbitmap_init_node(struct sbitmap *sb, unsigned int depth, int shift, in sbitmap_init_node() argument
37 if (depth >= 4) { in sbitmap_init_node()
38 while ((4U << shift) > depth) in sbitmap_init_node()
39 shift--; in sbitmap_init_node()
44 return -EINVAL; in sbitmap_init_node()
46 sb->shift = shift; in sbitmap_init_node()
47 sb->depth = depth; in sbitmap_init_node()
48 sb->map_nr = DIV_ROUND_UP(sb->depth, bits_per_word); in sbitmap_init_node()
50 if (depth == 0) { in sbitmap_init_node()
51 sb->map = NULL; in sbitmap_init_node()
55 sb->map = kcalloc_node(sb->map_nr, sizeof(*sb->map), flags, node); in sbitmap_init_node()
56 if (!sb->map) in sbitmap_init_node()
57 return -ENOMEM; in sbitmap_init_node()
59 for (i = 0; i < sb->map_nr; i++) { in sbitmap_init_node()
60 sb->map[i].depth = min(depth, bits_per_word); in sbitmap_init_node()
61 depth -= sb->map[i].depth; in sbitmap_init_node()
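
The constructor above first shrinks the word size for small bitmaps: for any depth >= 4, the shift is reduced until the map spans at least four words, which spreads allocations over a few cachelines. The remaining depth is then dealt out word by word, so only the last word can be partial. A minimal userspace sketch of that split (the arithmetic mirrors the kernel; the names and output are illustrative):

        /* Userspace sketch of how sbitmap_init_node() splits "depth" into words. */
        #include <stdio.h>

        int main(void)
        {
                unsigned int depth = 100, shift = 6;        /* 1 << 6 = 64 bits per word */
                unsigned int bits_per_word = 1U << shift;
                unsigned int map_nr = (depth + bits_per_word - 1) / bits_per_word;

                for (unsigned int i = 0; i < map_nr; i++) {
                        unsigned int word_depth = depth < bits_per_word ? depth : bits_per_word;

                        printf("word %u: depth=%u\n", i, word_depth);
                        depth -= word_depth;
                }
                /* Prints: word 0: depth=64, word 1: depth=36 -- only the last word is partial. */
                return 0;
        }
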
67 void sbitmap_resize(struct sbitmap *sb, unsigned int depth) in sbitmap_resize() argument
69 unsigned int bits_per_word = 1U << sb->shift; in sbitmap_resize()
72 sb->depth = depth; in sbitmap_resize()
73 sb->map_nr = DIV_ROUND_UP(sb->depth, bits_per_word); in sbitmap_resize()
75 for (i = 0; i < sb->map_nr; i++) { in sbitmap_resize()
76 sb->map[i].depth = min(depth, bits_per_word); in sbitmap_resize()
77 depth -= sb->map[i].depth; in sbitmap_resize()
82 static int __sbitmap_get_word(unsigned long *word, unsigned long depth, in __sbitmap_get_word() argument
89 nr = find_next_zero_bit(word, depth, hint); in __sbitmap_get_word()
90 if (unlikely(nr >= depth)) { in __sbitmap_get_word()
100 return -1; in __sbitmap_get_word()
107 if (hint >= depth - 1) in __sbitmap_get_word()
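
Within one word, the search starts at the caller's hint and is allowed to wrap back to bit 0 exactly once before giving up. A userspace model of that wrap-once scan (the kernel additionally loops on test_and_set_bit_lock(), since another CPU can steal the bit between the find and the set; the simple bit-set below stands in for that):

        #include <stdio.h>

        static int get_word_bit(unsigned long *word, unsigned int depth, unsigned int hint)
        {
                unsigned int nr = hint;
                int wrapped = 0;

                for (;;) {
                        while (nr < depth && (*word & (1UL << nr)))
                                nr++;
                        if (nr < depth) {
                                *word |= 1UL << nr;     /* stand-in for test_and_set_bit_lock() */
                                return nr;
                        }
                        if (!hint || wrapped)
                                return -1;              /* word is full */
                        nr = 0;                         /* wrap once, as the kernel does */
                        wrapped = 1;
                }
        }

        int main(void)
        {
                unsigned long word = 0x0f;      /* bits 0-3 already claimed, depth 8 */

                printf("%d\n", get_word_bit(&word, 8, 6));      /* 6 */
                printf("%d\n", get_word_bit(&word, 8, 6));      /* 7 */
                printf("%d\n", get_word_bit(&word, 8, 6));      /* wraps: 4 */
                return 0;
        }
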
117 int nr = -1; in sbitmap_get()
121 for (i = 0; i < sb->map_nr; i++) { in sbitmap_get()
122 nr = __sbitmap_get_word(&sb->map[index].word, in sbitmap_get()
123 sb->map[index].depth, in sbitmap_get()
126 if (nr != -1) { in sbitmap_get()
127 nr += index << sb->shift; in sbitmap_get()
133 alloc_hint = index << sb->shift; in sbitmap_get()
135 if (index >= sb->map_nr) { in sbitmap_get()
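
At the next level up, sbitmap_get() starts in the word the hint falls into and visits at most map_nr words, wrapping the word index and adding index << shift to turn a per-word bit into a global bit number. A simplified userspace model (it restarts every word at bit 0, whereas the kernel also carries the in-word hint into the first word it scans):

        #include <stdio.h>

        #define SHIFT   3                       /* 8 bits per word keeps the demo readable */

        int main(void)
        {
                unsigned long map[3] = { 0xff, 0xff, 0x01 };    /* words 0 and 1 full */
                unsigned int map_nr = 3, alloc_hint = 10;       /* hint lands in word 1 */
                unsigned int index = alloc_hint >> SHIFT;
                int nr = -1;

                for (unsigned int i = 0; i < map_nr; i++) {
                        for (unsigned int bit = 0; bit < (1U << SHIFT); bit++) {
                                if (!(map[index] & (1UL << bit))) {
                                        map[index] |= 1UL << bit;
                                        nr = bit + (index << SHIFT);    /* global bit number */
                                        goto done;
                                }
                        }
                        if (++index >= map_nr)          /* wrap to the first word */
                                index = 0;
                }
        done:
                printf("allocated bit %d\n", nr);       /* word 2, bit 1 -> 17 */
                return 0;
        }
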
149 int nr = -1; in sbitmap_get_shallow()
153 for (i = 0; i < sb->map_nr; i++) { in sbitmap_get_shallow()
154 nr = __sbitmap_get_word(&sb->map[index].word, in sbitmap_get_shallow()
155 min(sb->map[index].depth, shallow_depth), in sbitmap_get_shallow()
157 if (nr != -1) { in sbitmap_get_shallow()
158 nr += index << sb->shift; in sbitmap_get_shallow()
164 alloc_hint = index << sb->shift; in sbitmap_get_shallow()
166 if (index >= sb->map_nr) { in sbitmap_get_shallow()
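
The shallow variant runs the same scan but clamps each word's usable depth to shallow_depth, which caps how many bits one class of user can take per word. For instance, under the assumed geometry below, a shallow depth of 16 leaves only 48 of 164 bits reachable:

        /* Effective capacity under a shallow limit: each word contributes at most
         * min(word_depth, shallow_depth) bits. Userspace arithmetic only.
         */
        #include <stdio.h>

        int main(void)
        {
                unsigned int word_depths[] = { 64, 64, 36 };    /* depth=164, shift=6 */
                unsigned int shallow_depth = 16, usable = 0;

                for (unsigned int i = 0; i < 3; i++)
                        usable += word_depths[i] < shallow_depth ?
                                  word_depths[i] : shallow_depth;
                printf("usable bits: %u of 164\n", usable);     /* 48 */
                return 0;
        }
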
180 for (i = 0; i < sb->map_nr; i++) { in sbitmap_any_bit_set()
181 if (sb->map[i].word) in sbitmap_any_bit_set()
192 for (i = 0; i < sb->map_nr; i++) { in sbitmap_any_bit_clear()
193 const struct sbitmap_word *word = &sb->map[i]; in sbitmap_any_bit_clear()
196 ret = find_first_zero_bit(&word->word, word->depth); in sbitmap_any_bit_clear()
197 if (ret < word->depth) in sbitmap_any_bit_clear()
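
sbitmap_any_bit_clear() reports a free bit only if the first zero bit of some word lies below that word's depth; zero bits at or beyond the depth are padding, not capacity. A userspace equivalent of that check:

        #include <stdio.h>

        static unsigned int first_zero_bit(unsigned long word, unsigned int depth)
        {
                for (unsigned int i = 0; i < depth; i++)
                        if (!(word & (1UL << i)))
                                return i;
                return depth;   /* no zero bit below depth */
        }

        int main(void)
        {
                unsigned long full = 0x3f, sparse = 0x2f;       /* depth = 6 */

                printf("%s\n", first_zero_bit(full, 6) < 6 ? "free" : "full");   /* full */
                printf("%s\n", first_zero_bit(sparse, 6) < 6 ? "free" : "full"); /* free (bit 4) */
                return 0;
        }
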
208 for (i = 0; i < sb->map_nr; i++) { in sbitmap_weight()
209 const struct sbitmap_word *word = &sb->map[i]; in sbitmap_weight()
211 weight += bitmap_weight(&word->word, word->depth); in sbitmap_weight()
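
The weight is a plain population count accumulated across words. A userspace equivalent using the compiler builtin (the kernel's bitmap_weight() only counts bits below word->depth, which this sketch omits by using full words):

        #include <stdio.h>

        int main(void)
        {
                unsigned long map[] = { 0xffUL, 0x5UL };        /* 8 + 2 set bits */
                unsigned int weight = 0;

                for (unsigned int i = 0; i < 2; i++)
                        weight += __builtin_popcountl(map[i]);
                printf("weight=%u\n", weight);                  /* 10 */
                return 0;
        }
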
219 seq_printf(m, "depth=%u\n", sb->depth); in sbitmap_show()
221 seq_printf(m, "bits_per_word=%u\n", 1U << sb->shift); in sbitmap_show()
222 seq_printf(m, "map_nr=%u\n", sb->map_nr); in sbitmap_show()
245 for (i = 0; i < sb->map_nr; i++) { in sbitmap_bitmap_show()
246 unsigned long word = READ_ONCE(sb->map[i].word); in sbitmap_bitmap_show()
247 unsigned int word_bits = READ_ONCE(sb->map[i].depth); in sbitmap_bitmap_show()
250 unsigned int bits = min(8 - byte_bits, word_bits); in sbitmap_bitmap_show()
252 byte |= (word & (BIT(bits) - 1)) << byte_bits; in sbitmap_bitmap_show()
261 word_bits -= bits; in sbitmap_bitmap_show()
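
The dump loop re-packs words of arbitrary depth into contiguous 8-bit bytes, LSB first, so a partial word does not leave a gap in the hex output. A userspace model of that re-packing (printf() stands in for the seq_file emission):

        #include <stdio.h>

        int main(void)
        {
                unsigned long words[] = { 0x2aUL, 0x3UL };
                unsigned int depths[] = { 6, 4 };       /* a 6-bit word then a 4-bit word */
                unsigned int byte = 0, byte_bits = 0;

                for (unsigned int i = 0; i < 2; i++) {
                        unsigned long word = words[i];
                        unsigned int word_bits = depths[i];

                        while (word_bits > 0) {
                                unsigned int bits = 8 - byte_bits;

                                if (bits > word_bits)
                                        bits = word_bits;
                                byte |= (word & ((1U << bits) - 1)) << byte_bits;
                                byte_bits += bits;
                                if (byte_bits == 8) {
                                        printf("byte: %02x\n", byte);   /* ea */
                                        byte = byte_bits = 0;
                                }
                                word >>= bits;
                                word_bits -= bits;
                        }
                }
                if (byte_bits)
                        printf("byte: %02x (%u bits)\n", byte, byte_bits);
                return 0;
        }
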
274 unsigned int depth) in sbq_calc_wake_batch() argument
281 * batch size is small enough that the full depth of the bitmap, in sbq_calc_wake_batch()
282 * potentially limited by a shallow depth, is enough to wake up all of in sbq_calc_wake_batch()
286 * be a partial word. There are depth / bits_per_word full words and in sbq_calc_wake_batch()
287 * depth % bits_per_word bits left over. In bitwise arithmetic: in sbq_calc_wake_batch()
290 * depth / bits_per_word = depth >> shift in sbq_calc_wake_batch()
291 * depth % bits_per_word = depth & ((1 << shift) - 1) in sbq_calc_wake_batch()
293 * Each word can be limited to sbq->min_shallow_depth bits. in sbq_calc_wake_batch()
295 shallow_depth = min(1U << sbq->sb.shift, sbq->min_shallow_depth); in sbq_calc_wake_batch()
296 depth = ((depth >> sbq->sb.shift) * shallow_depth + in sbq_calc_wake_batch()
297 min(depth & ((1U << sbq->sb.shift) - 1), shallow_depth)); in sbq_calc_wake_batch()
298 wake_batch = clamp_t(unsigned int, depth / SBQ_WAIT_QUEUES, 1, in sbq_calc_wake_batch()
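
A worked instance of the formula above, assuming the in-tree constants SBQ_WAIT_QUEUES == 8 and SBQ_WAKE_BATCH == 8: with depth = 100, 64-bit words, and min_shallow_depth = 16, only 32 bits are guaranteed reachable, so the batch is 32 / 8 = 4:

        #include <stdio.h>

        int main(void)
        {
                unsigned int depth = 100, shift = 6;            /* 64 bits per word */
                unsigned int min_shallow_depth = 16;            /* set by the user */
                unsigned int shallow = (1U << shift) < min_shallow_depth ?
                                       (1U << shift) : min_shallow_depth;       /* 16 */
                unsigned int rem = depth & ((1U << shift) - 1);                 /* 100 % 64 = 36 */
                unsigned int usable = (depth >> shift) * shallow +              /* 1 full word */
                                      (rem < shallow ? rem : shallow);          /* 16 + 16 = 32 */
                unsigned int wake_batch = usable / 8;           /* SBQ_WAIT_QUEUES */

                if (wake_batch < 1)                             /* clamp_t(..., 1, SBQ_WAKE_BATCH) */
                        wake_batch = 1;
                if (wake_batch > 8)
                        wake_batch = 8;
                printf("usable=%u wake_batch=%u\n", usable, wake_batch);        /* 32, 4 */
                return 0;
        }
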
304 int sbitmap_queue_init_node(struct sbitmap_queue *sbq, unsigned int depth, in sbitmap_queue_init_node() argument
310 ret = sbitmap_init_node(&sbq->sb, depth, shift, flags, node); in sbitmap_queue_init_node()
314 sbq->alloc_hint = alloc_percpu_gfp(unsigned int, flags); in sbitmap_queue_init_node()
315 if (!sbq->alloc_hint) { in sbitmap_queue_init_node()
316 sbitmap_free(&sbq->sb); in sbitmap_queue_init_node()
317 return -ENOMEM; in sbitmap_queue_init_node()
320 if (depth && !round_robin) { in sbitmap_queue_init_node()
322 *per_cpu_ptr(sbq->alloc_hint, i) = prandom_u32() % depth; in sbitmap_queue_init_node()
325 sbq->min_shallow_depth = UINT_MAX; in sbitmap_queue_init_node()
326 sbq->wake_batch = sbq_calc_wake_batch(sbq, depth); in sbitmap_queue_init_node()
327 atomic_set(&sbq->wake_index, 0); in sbitmap_queue_init_node()
329 sbq->ws = kzalloc_node(SBQ_WAIT_QUEUES * sizeof(*sbq->ws), flags, node); in sbitmap_queue_init_node()
330 if (!sbq->ws) { in sbitmap_queue_init_node()
331 free_percpu(sbq->alloc_hint); in sbitmap_queue_init_node()
332 sbitmap_free(&sbq->sb); in sbitmap_queue_init_node()
333 return -ENOMEM; in sbitmap_queue_init_node()
337 init_waitqueue_head(&sbq->ws[i].wait); in sbitmap_queue_init_node()
338 atomic_set(&sbq->ws[i].wait_cnt, sbq->wake_batch); in sbitmap_queue_init_node()
341 sbq->round_robin = round_robin; in sbitmap_queue_init_node()
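
Taken together, the queue initializer sizes the bitmap, seeds a random per-CPU allocation hint, and arms SBQ_WAIT_QUEUES wait queues with the computed batch. A sketch of typical use as a tag allocator, assuming kernel context (tags_setup, tag_alloc, and tag_free are illustrative names; error handling is trimmed):

        #include <linux/sbitmap.h>

        static struct sbitmap_queue tags;

        static int tags_setup(int node)
        {
                /* depth=128; shift=-1 lets sbitmap pick bits_per_word itself */
                return sbitmap_queue_init_node(&tags, 128, -1, false,
                                               GFP_KERNEL, node);
        }

        static int tag_alloc(void)
        {
                return __sbitmap_queue_get(&tags);      /* -1 if no tag is free */
        }

        static void tag_free(int tag, unsigned int cpu)
        {
                sbitmap_queue_clear(&tags, tag, cpu);   /* wakes batched waiters */
        }
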
347 unsigned int depth) in sbitmap_queue_update_wake_batch() argument
349 unsigned int wake_batch = sbq_calc_wake_batch(sbq, depth); in sbitmap_queue_update_wake_batch()
352 if (sbq->wake_batch != wake_batch) { in sbitmap_queue_update_wake_batch()
353 WRITE_ONCE(sbq->wake_batch, wake_batch); in sbitmap_queue_update_wake_batch()
361 atomic_set(&sbq->ws[i].wait_cnt, 1); in sbitmap_queue_update_wake_batch()
365 void sbitmap_queue_resize(struct sbitmap_queue *sbq, unsigned int depth) in sbitmap_queue_resize() argument
367 sbitmap_queue_update_wake_batch(sbq, depth); in sbitmap_queue_resize()
368 sbitmap_resize(&sbq->sb, depth); in sbitmap_queue_resize()
374 unsigned int hint, depth; in __sbitmap_queue_get() local
377 hint = this_cpu_read(*sbq->alloc_hint); in __sbitmap_queue_get()
378 depth = READ_ONCE(sbq->sb.depth); in __sbitmap_queue_get()
379 if (unlikely(hint >= depth)) { in __sbitmap_queue_get()
380 hint = depth ? prandom_u32() % depth : 0; in __sbitmap_queue_get()
381 this_cpu_write(*sbq->alloc_hint, hint); in __sbitmap_queue_get()
383 nr = sbitmap_get(&sbq->sb, hint, sbq->round_robin); in __sbitmap_queue_get()
385 if (nr == -1) { in __sbitmap_queue_get()
387 this_cpu_write(*sbq->alloc_hint, 0); in __sbitmap_queue_get()
388 } else if (nr == hint || unlikely(sbq->round_robin)) { in __sbitmap_queue_get()
391 if (hint >= depth - 1) in __sbitmap_queue_get()
393 this_cpu_write(*sbq->alloc_hint, hint); in __sbitmap_queue_get()
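
The per-CPU hint policy is deliberately conservative: a stale hint is re-randomized, a failed scan resets it to 0, and a successful one only advances it when the allocation actually landed on the hint (or in round-robin mode). A userspace model of just that policy:

        #include <stdio.h>

        static unsigned int update_hint(int nr, unsigned int hint,
                                        unsigned int depth, int round_robin)
        {
                if (nr == -1)
                        return 0;               /* map looked full: restart scans at 0 */
                if (nr == (int)hint || round_robin) {
                        hint = nr + 1;          /* advance past the bit we just took */
                        if (hint >= depth - 1)  /* wrap, matching the kernel's check */
                                hint = 0;
                }
                return hint;
        }

        int main(void)
        {
                printf("%u\n", update_hint(5, 5, 128, 0));      /* used the hint: 6 */
                printf("%u\n", update_hint(9, 5, 128, 0));      /* didn't: stays 5 */
                printf("%u\n", update_hint(-1, 5, 128, 0));     /* full: reset to 0 */
                return 0;
        }
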
403 unsigned int hint, depth; in __sbitmap_queue_get_shallow() local
406 WARN_ON_ONCE(shallow_depth < sbq->min_shallow_depth); in __sbitmap_queue_get_shallow()
408 hint = this_cpu_read(*sbq->alloc_hint); in __sbitmap_queue_get_shallow()
409 depth = READ_ONCE(sbq->sb.depth); in __sbitmap_queue_get_shallow()
410 if (unlikely(hint >= depth)) { in __sbitmap_queue_get_shallow()
411 hint = depth ? prandom_u32() % depth : 0; in __sbitmap_queue_get_shallow()
412 this_cpu_write(*sbq->alloc_hint, hint); in __sbitmap_queue_get_shallow()
414 nr = sbitmap_get_shallow(&sbq->sb, hint, shallow_depth); in __sbitmap_queue_get_shallow()
416 if (nr == -1) { in __sbitmap_queue_get_shallow()
418 this_cpu_write(*sbq->alloc_hint, 0); in __sbitmap_queue_get_shallow()
419 } else if (nr == hint || unlikely(sbq->round_robin)) { in __sbitmap_queue_get_shallow()
422 if (hint >= depth - 1) in __sbitmap_queue_get_shallow()
424 this_cpu_write(*sbq->alloc_hint, hint); in __sbitmap_queue_get_shallow()
434 sbq->min_shallow_depth = min_shallow_depth; in sbitmap_queue_min_shallow_depth()
435 sbitmap_queue_update_wake_batch(sbq, sbq->sb.depth); in sbitmap_queue_min_shallow_depth()
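
min_shallow_depth must be declared before any shallow allocation (hence the WARN_ON_ONCE above): it feeds the wake-batch calculation so batches stay small enough for the most throttled user. A sketch of the pairing, assuming kernel context; the names and the limit of 16 are illustrative:

        #include <linux/sbitmap.h>

        /* Done once after init, before any shallow allocation. */
        static void throttle_setup(struct sbitmap_queue *sbq)
        {
                sbitmap_queue_min_shallow_depth(sbq, 16);
        }

        static int throttled_alloc(struct sbitmap_queue *sbq)
        {
                return __sbitmap_queue_get_shallow(sbq, 16);    /* at most 16 bits per word */
        }
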
443 wake_index = atomic_read(&sbq->wake_index); in sbq_wake_ptr()
445 struct sbq_wait_state *ws = &sbq->ws[wake_index]; in sbq_wake_ptr()
447 if (waitqueue_active(&ws->wait)) { in sbq_wake_ptr()
448 int o = atomic_read(&sbq->wake_index); in sbq_wake_ptr()
451 atomic_cmpxchg(&sbq->wake_index, o, wake_index); in sbq_wake_ptr()
471 wait_cnt = atomic_dec_return(&ws->wait_cnt); in __sbq_wake_up()
475 wake_batch = READ_ONCE(sbq->wake_batch); in __sbq_wake_up()
489 ret = atomic_cmpxchg(&ws->wait_cnt, wait_cnt, wake_batch); in __sbq_wake_up()
491 sbq_index_atomic_inc(&sbq->wake_index); in __sbq_wake_up()
492 wake_up_nr(&ws->wait, wake_batch); in __sbq_wake_up()
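
Wakeups are batched: every cleared bit decrements the active wait queue's wait_cnt, and only when it reaches zero does the cmpxchg() reset it to wake_batch and wake that many sleepers, the compare-and-exchange ensuring exactly one of the racing clearers performs the reset. A userspace model with C11 atomics (wake_waiters() stands in for wake_up_nr(); the kernel version also re-reads wake_batch and retries on races, which is elided here):

        #include <stdatomic.h>
        #include <stdio.h>

        static atomic_int wait_cnt;
        static int wake_batch = 4;

        static void wake_waiters(int n) { printf("wake %d waiters\n", n); }

        static void on_bit_cleared(void)
        {
                int cnt = atomic_fetch_sub(&wait_cnt, 1) - 1;

                if (cnt <= 0) {
                        int expected = cnt;

                        /* Reset for the next batch; only one concurrent caller wins. */
                        if (atomic_compare_exchange_strong(&wait_cnt, &expected,
                                                           wake_batch))
                                wake_waiters(wake_batch);
                }
        }

        int main(void)
        {
                atomic_store(&wait_cnt, wake_batch);
                for (int i = 0; i < 9; i++)
                        on_bit_cleared();       /* wakes a batch after every 4th clear */
                return 0;
        }
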
512 sbitmap_clear_bit_unlock(&sbq->sb, nr); in sbitmap_queue_clear()
522 if (likely(!sbq->round_robin && nr < sbq->sb.depth)) in sbitmap_queue_clear()
523 *per_cpu_ptr(sbq->alloc_hint, cpu) = nr; in sbitmap_queue_clear()
536 wake_index = atomic_read(&sbq->wake_index); in sbitmap_queue_wake_all()
538 struct sbq_wait_state *ws = &sbq->ws[wake_index]; in sbitmap_queue_wake_all()
540 if (waitqueue_active(&ws->wait)) in sbitmap_queue_wake_all()
541 wake_up(&ws->wait); in sbitmap_queue_wake_all()
553 sbitmap_show(&sbq->sb, m); in sbitmap_queue_show()
561 seq_printf(m, "%u", *per_cpu_ptr(sbq->alloc_hint, i)); in sbitmap_queue_show()
565 seq_printf(m, "wake_batch=%u\n", sbq->wake_batch); in sbitmap_queue_show()
566 seq_printf(m, "wake_index=%d\n", atomic_read(&sbq->wake_index)); in sbitmap_queue_show()
570 struct sbq_wait_state *ws = &sbq->ws[i]; in sbitmap_queue_show()
573 atomic_read(&ws->wait_cnt), in sbitmap_queue_show()
574 waitqueue_active(&ws->wait) ? "active" : "inactive"); in sbitmap_queue_show()
578 seq_printf(m, "round_robin=%d\n", sbq->round_robin); in sbitmap_queue_show()
579 seq_printf(m, "min_shallow_depth=%u\n", sbq->min_shallow_depth); in sbitmap_queue_show()