
Searched refs:min (Results 1 – 25 of 34) sorted by relevance

/lib/zlib_inflate/
inftrees.c:28 unsigned min, max; /* minimum and maximum code lengths */ in zlib_inflate_table() local
112 for (min = 1; min < MAXBITS; min++) in zlib_inflate_table()
113 if (count[min] != 0) break; in zlib_inflate_table()
114 if (root < min) root = min; in zlib_inflate_table()
189 len = min; /* starting code length */ in zlib_inflate_table()
221 min = fill; /* save offset to next table */ in zlib_inflate_table()
252 next += min; /* here min is 1 << curr */ in zlib_inflate_table()
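In the inftrees.c hits, min is an ordinary variable holding the shortest Huffman code length found in the count[] histogram; the root table is then widened to at least that length. A minimal standalone sketch of that scan, assuming zlib's convention that count[len] holds the number of codes of length len (MAXBITS as in inftrees.c); illustrative only, not the kernel's code:

#define MAXBITS 15

static unsigned shortest_code_len(const unsigned short count[MAXBITS + 1],
                                  unsigned *root)
{
    unsigned min;

    for (min = 1; min < MAXBITS; min++)
        if (count[min] != 0)
            break;
    if (*root < min)        /* the root table cannot be narrower than the shortest code */
        *root = min;
    return min;
}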
/lib/
linear_ranges.c:72 return r->min + (r->max_sel - r->min_sel) * r->step; in linear_range_get_max_value()
93 *val = r->min + (selector - r->min_sel) * r->step; in linear_range_get_value()
144 if (r->min > val) in linear_range_get_selector_low()
157 *selector = (val - r->min) / r->step + r->min_sel; in linear_range_get_selector_low()
228 if (r->min > val) { in linear_range_get_selector_high()
238 *selector = DIV_ROUND_UP(val - r->min, r->step) + r->min_sel; in linear_range_get_selector_high()
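All the linear_ranges.c matches are the same linear mapping between a hardware selector and a value: val = r->min + (selector - r->min_sel) * r->step, inverted with a rounding-down or rounding-up division. A simplified userspace sketch of that mapping (struct and function names shortened, r->step assumed non-zero, no bounds checking):

struct lin_range {
    unsigned int min;       /* value corresponding to min_sel */
    unsigned int min_sel;   /* lowest selector */
    unsigned int max_sel;   /* highest selector */
    unsigned int step;      /* value increment per selector step */
};

static unsigned int lr_value(const struct lin_range *r, unsigned int selector)
{
    return r->min + (selector - r->min_sel) * r->step;
}

/* largest selector whose value does not exceed val (round down) */
static unsigned int lr_selector_low(const struct lin_range *r, unsigned int val)
{
    return (val - r->min) / r->step + r->min_sel;
}

/* smallest selector whose value is at least val (round up, like DIV_ROUND_UP) */
static unsigned int lr_selector_high(const struct lin_range *r, unsigned int val)
{
    return (val - r->min + r->step - 1) / r->step + r->min_sel;
}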
nlattr.c:119 (pt->min < 0 || pt->max < 0)); in nla_get_range_unsigned()
121 range->min = 0; in nla_get_range_unsigned()
146 range->min = pt->min; in nla_get_range_unsigned()
153 range->min = pt->min; in nla_get_range_unsigned()
208 if (value < range.min || value > range.max) { in nla_validate_range_unsigned()
229 range->min = S8_MIN; in nla_get_range_signed()
233 range->min = S16_MIN; in nla_get_range_signed()
237 range->min = S32_MIN; in nla_get_range_signed()
241 range->min = S64_MIN; in nla_get_range_signed()
251 range->min = pt->min; in nla_get_range_signed()
[all …]
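The nlattr.c hits are netlink policy validation: a [min, max] range is first defaulted to the natural limits of the attribute type (S8_MIN, S16_MIN, ...) and then narrowed to the policy's explicit bounds before the attribute value is checked against it. A hedged userspace sketch of that shape, with invented names rather than the kernel's netlink structures:

#include <stdbool.h>
#include <stdint.h>

struct s_range { int64_t min, max; };

/* Default to the full width of the attribute type, then narrow to the
 * policy's bounds if it provides any.  Illustrative, not the kernel API. */
static void get_range_signed(struct s_range *range, unsigned int attr_size,
                             const struct s_range *policy)
{
    switch (attr_size) {
    case 1:  range->min = INT8_MIN;  range->max = INT8_MAX;  break;
    case 2:  range->min = INT16_MIN; range->max = INT16_MAX; break;
    case 4:  range->min = INT32_MIN; range->max = INT32_MAX; break;
    default: range->min = INT64_MIN; range->max = INT64_MAX; break;
    }
    if (policy)
        *range = *policy;
}

static bool value_in_range(int64_t value, const struct s_range *range)
{
    return value >= range->min && value <= range->max;
}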
idr.c:380 int ida_alloc_range(struct ida *ida, unsigned int min, unsigned int max, in ida_alloc_range() argument
383 XA_STATE(xas, &ida->xa, min / IDA_BITMAP_BITS); in ida_alloc_range()
384 unsigned bit = min % IDA_BITMAP_BITS; in ida_alloc_range()
388 if ((int)min < 0) in ida_alloc_range()
398 if (xas.xa_index > min / IDA_BITMAP_BITS) in ida_alloc_range()
455 xas.xa_index = min / IDA_BITMAP_BITS; in ida_alloc_range()
456 bit = min % IDA_BITMAP_BITS; in ida_alloc_range()
469 xas_set(&xas, min / IDA_BITMAP_BITS); in ida_alloc_range()
470 bit = min % IDA_BITMAP_BITS; in ida_alloc_range()
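idr.c is where min is a parameter of the public IDA API: ida_alloc_range() hands out an unused ID within [min, max]. A kernel-side usage sketch; ida_alloc_range() and ida_free() are the real <linux/idr.h> interface, everything else (names, range) is invented:

#include <linux/idr.h>

static DEFINE_IDA(example_ida);         /* hypothetical, driver-private */

/* Returns an unused ID in [16, 255], or a negative errno on failure. */
static int example_get_id(void)
{
    return ida_alloc_range(&example_ida, 16, 255, GFP_KERNEL);
}

static void example_put_id(int id)
{
    ida_free(&example_ida, id);
}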
find_bit.c:68 return min(start + __ffs(tmp), nbits); in _find_next_bit()
113 return min(idx * BITS_PER_LONG + __ffs(addr[idx]), size); in find_first_bit()
131 return min(idx * BITS_PER_LONG + ffz(addr[idx]), size); in find_first_zero_bit()
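In find_bit.c the min() macro only clamps a computed bit position to the bitmap size, so "no bit found" and "found past the end" both come back as exactly the size. A small sketch of the same convention for a single word, using a compiler builtin instead of the kernel's __ffs():

/* Illustrative: first set bit of one word, clamped to nbits. */
static unsigned long first_set_bit(unsigned long word, unsigned long nbits)
{
    unsigned long bit = word ? (unsigned long)__builtin_ctzl(word) : nbits;

    return bit < nbits ? bit : nbits;   /* min(bit, nbits) */
}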
sbitmap.c:81 sb->map[i].depth = min(depth, bits_per_word); in sbitmap_init_node()
101 sb->map[i].depth = min(depth, bits_per_word); in sbitmap_resize()
203 min(sb->map[index].depth, shallow_depth), in sbitmap_get_shallow()
301 unsigned int bits = min(8 - byte_bits, word_bits); in sbitmap_bitmap_show()
346 shallow_depth = min(1U << sbq->sb.shift, sbq->min_shallow_depth); in sbq_calc_wake_batch()
348 min(depth & ((1U << sbq->sb.shift) - 1), shallow_depth)); in sbq_calc_wake_batch()
test_xarray.c:535 unsigned long min = index & ~((1UL << order) - 1); in check_multi_store_1() local
536 unsigned long max = min + (1UL << order); in check_multi_store_1()
539 XA_BUG_ON(xa, xa_load(xa, min) != xa_mk_index(index)); in check_multi_store_1()
542 XA_BUG_ON(xa, xa_load(xa, min - 1) != NULL); in check_multi_store_1()
545 XA_BUG_ON(xa, xas_store(&xas, xa_mk_index(min)) != xa_mk_index(index)); in check_multi_store_1()
547 XA_BUG_ON(xa, xa_load(xa, min) != xa_mk_index(min)); in check_multi_store_1()
548 XA_BUG_ON(xa, xa_load(xa, max - 1) != xa_mk_index(min)); in check_multi_store_1()
550 XA_BUG_ON(xa, xa_load(xa, min - 1) != NULL); in check_multi_store_1()
552 xa_erase_index(xa, min); in check_multi_store_1()
902 unsigned int min = 1 << i; in check_store_iter() local
[all …]
test_linear_ranges.c:111 .min = RANGE1_MIN,
116 .min = RANGE2_MIN,
kfifo.c:102 l = min(len, size - off); in kfifo_copy_in()
141 l = min(len, size - off); in kfifo_copy_out()
190 l = min(len, size - off); in kfifo_copy_from_user()
250 l = min(len, size - off); in kfifo_copy_to_user()
348 l = min(len, size - off); in setup_sgl()
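Every kfifo.c hit is the same ring-buffer idiom: a copy is split at the wrap point, and min(len, size - off) is the length of the first, unwrapped chunk. A userspace sketch of the copy-in half, assuming size is a power of two and len has already been limited to the free space:

#include <string.h>

/* Illustrative ring-buffer copy-in, not the kernel's kfifo. */
static void fifo_copy_in(unsigned char *buf, unsigned int size,
                         const void *src, unsigned int len, unsigned int in)
{
    unsigned int off = in & (size - 1);
    unsigned int l = len < size - off ? len : size - off;    /* min(len, size - off) */

    memcpy(buf + off, src, l);                               /* up to the end of the buffer */
    memcpy(buf, (const unsigned char *)src + l, len - l);    /* wrap to the start */
}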
iov_iter.c:22 __v.iov_len = min(n, __p->iov_len - skip); \
34 __v.iov_len = min(n, __p->iov_len); \
49 __v.iov_len = min(n, __p->iov_len - skip); \
58 __v.iov_len = min(n, __p->iov_len); \
183 copy = min(bytes, iov->iov_len - skip); in copy_page_to_iter_iovec()
199 copy = min(bytes, iov->iov_len); in copy_page_to_iter_iovec()
213 copy = min(bytes, iov->iov_len - skip); in copy_page_to_iter_iovec()
227 copy = min(bytes, iov->iov_len); in copy_page_to_iter_iovec()
267 copy = min(bytes, iov->iov_len - skip); in copy_page_from_iter_iovec()
283 copy = min(bytes, iov->iov_len); in copy_page_from_iter_iovec()
[all …]
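The iov_iter.c pattern is segment walking: each step copies min(bytes, iov->iov_len - skip), where skip is the offset already consumed in the current iovec. A rough userspace stand-in for the copy-to-iovec direction (assumes the iovecs cover at least bytes bytes; the real iterator handles far more cases):

#include <string.h>
#include <sys/uio.h>

static size_t copy_to_iovecs(const char *src, size_t bytes,
                             const struct iovec *iov, size_t skip)
{
    size_t done = 0;

    while (done < bytes) {
        size_t avail = iov->iov_len - skip;
        size_t copy = bytes - done < avail ? bytes - done : avail;  /* min() */

        memcpy((char *)iov->iov_base + skip, src + done, copy);
        done += copy;
        skip = 0;       /* only the first segment has a partial offset */
        iov++;
    }
    return done;
}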
seq_buf.c:235 start_len = min(len, MAX_MEMHEX_BYTES);
374 linelen = min(remaining, rowsize);
kstrtox.c:383 count = min(count, sizeof(buf) - 1); in kstrtobool_from_user()
397 count = min(count, sizeof(buf) - 1); \
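kstrtox.c uses min() as a buffer guard: the user-supplied length is clamped to the local buffer size minus one so there is always room for a terminating NUL. A userspace sketch of the same guard, with memcpy() standing in for copy_from_user():

#include <string.h>

static void copy_bounded(char *buf, size_t bufsize,
                         const char *src, size_t count)
{
    size_t n = count < bufsize - 1 ? count : bufsize - 1;   /* min(count, sizeof(buf) - 1) */

    memcpy(buf, src, n);
    buf[n] = '\0';
}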
xarray.c:1822 xas.xa_index = limit.min; in __xa_alloc()
1860 u32 min = limit.min; in __xa_alloc_cyclic() local
1863 limit.min = max(min, *next); in __xa_alloc_cyclic()
1870 if (ret < 0 && limit.min > min) { in __xa_alloc_cyclic()
1871 limit.min = min; in __xa_alloc_cyclic()
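The __xa_alloc_cyclic() lines show the cyclic-allocation fallback: search upward from max(min, *next) first, and only when that upper part of the range is exhausted retry from the real minimum. A sketch of just that control flow; try_alloc() is a hypothetical stand-in for the underlying allocator, not a kernel function:

/* Hypothetical backing allocator: fills *id from [lo, hi], returns < 0 on failure. */
static int try_alloc(unsigned int lo, unsigned int hi, unsigned int *id);

static int alloc_cyclic(unsigned int min, unsigned int max,
                        unsigned int *next, unsigned int *id)
{
    unsigned int start = *next > min ? *next : min;   /* max(min, *next) */
    int ret = try_alloc(start, max, id);

    if (ret < 0 && start > min)
        ret = try_alloc(min, max, id);                /* wrap around and retry */
    if (ret >= 0)
        *next = *id + 1;                              /* next search starts after this ID */
    return ret;
}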
objagg.c:225 unsigned int min, max; in objagg_obj_root_id_alloc() local
235 min = hnode->root_id; in objagg_obj_root_id_alloc()
241 min = objagg->hints->root_count; in objagg_obj_root_id_alloc()
245 root_id = ida_alloc_range(&objagg->root_ida, min, max, GFP_KERNEL); in objagg_obj_root_id_alloc()
hexdump.c:281 linelen = min(remaining, rowsize); in print_hex_dump()
scatterlist.c:941 len = min(miter.length, buflen - offset); in sg_copy_buffer()
1051 len = min(miter.length, buflen - offset); in sg_zero_buffer()
test_hmm.c:302 range.end = min(addr + (ARRAY_SIZE(pfns) << PAGE_SHIFT), end); in dmirror_fault()
695 next = min(end, addr + (ARRAY_SIZE(src_pfns) << PAGE_SHIFT)); in dmirror_migrate()
893 next = min(addr + (ARRAY_SIZE(pfns) << PAGE_SHIFT), end); in dmirror_snapshot()
bitmap.c:513 bitmap_set(bitmap, start, min(r->end - start + 1, r->off)); in bitmap_set_region()
1128 nbitsinlong = min(nbits_reg, BITS_PER_LONG); in __reg_op()
/lib/zstd/
huf_compress.c:264 U16 min = 0; in HUF_readCTable_wksp() local
267 valPerRank[n] = min; /* get starting value within each rank */ in HUF_readCTable_wksp()
268 min += nbPerRank[n]; in HUF_readCTable_wksp()
269 min >>= 1; in HUF_readCTable_wksp()
485 U16 min = 0; in HUF_buildCTable_wksp() local
487 valPerRank[n] = min; /* get starting value within each rank */ in HUF_buildCTable_wksp()
488 min += nbPerRank[n]; in HUF_buildCTable_wksp()
489 min >>= 1; in HUF_buildCTable_wksp()
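Both huf_compress.c hits are the canonical-Huffman step that assigns the first code value for each code length: walk from the longest length down to 1, add the number of codes of that length, and halve when moving to the next shorter length. A standalone sketch following the snippet above (the 12-bit bound is illustrative, not zstd's constant):

#define MAX_CODE_BITS 12   /* illustrative */

/* nbPerRank[n]: number of symbols with code length n;
 * valPerRank[n]: first code value assigned to that length. */
static void starting_values(const unsigned short nbPerRank[MAX_CODE_BITS + 1],
                            unsigned short valPerRank[MAX_CODE_BITS + 1],
                            int maxNbBits)
{
    unsigned short min = 0;
    int n;

    for (n = maxNbBits; n > 0; n--) {
        valPerRank[n] = min;   /* starting value within this rank */
        min += nbPerRank[n];
        min >>= 1;             /* codes one bit shorter start at half the value */
    }
}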
/lib/math/
rational.c:87 t = min(t, (max_numerator - n0) / n1); in rational_best_approximation()
reciprocal_div.c:25 R.sh1 = min(l, 1); in reciprocal_value()
/lib/crypto/
poly1305.c:36 bytes = min(nbytes, POLY1305_BLOCK_SIZE - desc->buflen); in poly1305_update_generic()
/lib/lz4/
lz4_decompress.c:342 length = min(length, (size_t)(oend - op)); in LZ4_decompress_generic()
388 size_t const mlen = min(length, (size_t)(oend - op)); in LZ4_decompress_generic()
472 dstCapacity = min(targetOutputSize, dstCapacity); in LZ4_decompress_safe_partial()
/lib/xz/
xz_dec_test.c:124 buffers.in_size = min(remaining, sizeof(buffer_in)); in xz_dec_test_write()
/lib/reed_solomon/
decode_rs.c:272 for (i = min(deg_lambda, nroots - 1) & ~1; i >= 0; i -= 2) {
