/lib/ |
D | lockref.c |
      46  new.count++;  in lockref_get()
      52  lockref->count++;  in lockref_get()
      67  new.count++;  in lockref_get_not_zero()
      68  if (old.count <= 0)  in lockref_get_not_zero()
      76  if (lockref->count > 0) {  in lockref_get_not_zero()
      77  lockref->count++;  in lockref_get_not_zero()
      95  new.count--;  in lockref_put_not_zero()
      96  if (old.count <= 1)  in lockref_put_not_zero()
     104  if (lockref->count > 1) {  in lockref_put_not_zero()
     105  lockref->count--;  in lockref_put_not_zero()
     [all …]
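
The lockref fragments above show the two paths of lockref_get_not_zero(): a speculative update of the combined spinlock-and-count word (guarded by the `old.count <= 0` check) and a locked fallback that re-tests `count > 0` under the spinlock. Below is a minimal userspace sketch of that same try-CAS-then-lock shape, using C11 atomics and a pthread mutex instead of the kernel's packed lockref word; the names are illustrative, not the kernel API.

    #include <pthread.h>
    #include <stdatomic.h>
    #include <stdbool.h>

    struct fake_lockref {
        pthread_mutex_t lock;
        atomic_int count;
    };

    /* Take a reference only if the count is currently non-zero. */
    static bool fake_lockref_get_not_zero(struct fake_lockref *ref)
    {
        int old = atomic_load(&ref->count);
        bool got = false;

        /* Fast path: increment with a CAS, without touching the lock. */
        while (old > 0) {
            if (atomic_compare_exchange_weak(&ref->count, &old, old + 1))
                return true;
            /* CAS failure reloads 'old'; the loop re-checks it is still > 0. */
        }

        /* Slow path: re-test and update under the lock, mirroring what the
         * kernel version does when the lockless path is not taken. */
        pthread_mutex_lock(&ref->lock);
        if (atomic_load(&ref->count) > 0) {
            atomic_fetch_add(&ref->count, 1);
            got = true;
        }
        pthread_mutex_unlock(&ref->lock);
        return got;
    }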
|
D | string.c |
     114  char *strncpy(char *dest, const char *src, size_t count)  in strncpy() argument
     118  while (count) {  in strncpy()
     122  count--;  in strncpy()
     180  ssize_t strscpy(char *dest, const char *src, size_t count)  in strscpy() argument
     183  size_t max = count;  in strscpy()
     186  if (count == 0 || WARN_ON_ONCE(count > INT_MAX))  in strscpy()
     217  count -= sizeof(unsigned long);  in strscpy()
     221  while (count) {  in strscpy()
     229  count--;  in strscpy()
     261  ssize_t strscpy_pad(char *dest, const char *src, size_t count)  in strscpy_pad() argument
     [all …]
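
The string.c fragments contrast strncpy(), which copies at most `count` bytes and may leave the destination unterminated, with strscpy()/strscpy_pad(), which always NUL-terminate and report truncation. A minimal userspace sketch of the strscpy() contract follows (not the kernel's word-at-a-time implementation), assuming a POSIX strnlen() and using a stand-in for -E2BIG:

    #include <string.h>
    #include <sys/types.h>

    #define SKETCH_E2BIG 7                  /* stand-in for the kernel's -E2BIG */

    static ssize_t strscpy_sketch(char *dest, const char *src, size_t count)
    {
        size_t len;

        if (count == 0)
            return -SKETCH_E2BIG;

        len = strnlen(src, count);
        if (len == count) {
            /* Source does not fit: copy what fits, terminate, report. */
            memcpy(dest, src, count - 1);
            dest[count - 1] = '\0';
            return -SKETCH_E2BIG;
        }

        memcpy(dest, src, len + 1);         /* includes the trailing NUL */
        return (ssize_t)len;                /* characters copied, NUL excluded */
    }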
|
D | iomap.c |
      41  static int count = 10;  in bad_io_access() local
      42  if (count) {  in bad_io_access()
      43  count--;  in bad_io_access()
     278  static inline void mmio_insb(const void __iomem *addr, u8 *dst, int count)  in mmio_insb() argument
     280  while (--count >= 0) {  in mmio_insb()
     286  static inline void mmio_insw(const void __iomem *addr, u16 *dst, int count)  in mmio_insw() argument
     288  while (--count >= 0) {  in mmio_insw()
     294  static inline void mmio_insl(const void __iomem *addr, u32 *dst, int count)  in mmio_insl() argument
     296  while (--count >= 0) {  in mmio_insl()
     305  static inline void mmio_outsb(void __iomem *addr, const u8 *src, int count)  in mmio_outsb() argument
     [all …]
|
D | percpu_counter.c |
      70  fbc->count = amount;  in percpu_counter_set()
      84  s64 count;  in percpu_counter_add_batch() local
      87  count = __this_cpu_read(*fbc->counters) + amount;  in percpu_counter_add_batch()
      88  if (abs(count) >= batch) {  in percpu_counter_add_batch()
      91  fbc->count += count;  in percpu_counter_add_batch()
      92  __this_cpu_sub(*fbc->counters, count - amount);  in percpu_counter_add_batch()
     110  s64 count;  in percpu_counter_sync() local
     113  count = __this_cpu_read(*fbc->counters);  in percpu_counter_sync()
     114  fbc->count += count;  in percpu_counter_sync()
     115  __this_cpu_sub(*fbc->counters, count);  in percpu_counter_sync()
     [all …]
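
percpu_counter_add_batch() keeps updates in a per-CPU counter and only folds them into the shared `fbc->count` once the local value reaches the batch threshold, so the common case never bounces the shared cacheline. A sketch of the same batching idea, with a thread-local delta standing in for the per-CPU variable and a mutex in place of fbc->lock:

    #include <pthread.h>
    #include <stdlib.h>

    struct batched_counter {
        pthread_mutex_t lock;
        long long count;                    /* shared total, approximate until folded */
    };

    static _Thread_local long long local_delta;     /* per-thread, uncontended */

    static void counter_add_batch(struct batched_counter *c, long long amount,
                                  long long batch)
    {
        long long count = local_delta + amount;

        if (llabs(count) >= batch) {
            /* Fold the whole local delta (including 'amount') into the
             * shared total, then reset the local counter to zero. */
            pthread_mutex_lock(&c->lock);
            c->count += count;
            pthread_mutex_unlock(&c->lock);
            local_delta = 0;
        } else {
            local_delta = count;
        }
    }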
|
D | strnlen_user.c |
      23  static inline long do_strnlen_user(const char __user *src, unsigned long count, unsigned long max)  in do_strnlen_user() argument
      60  if (res >= count)  in do_strnlen_user()
      61  return count+1;  in do_strnlen_user()
      92  long strnlen_user(const char __user *str, long count)  in strnlen_user() argument
      96  if (unlikely(count <= 0))  in strnlen_user()
     109  if (max > count)  in strnlen_user()
     110  max = count;  in strnlen_user()
     113  retval = do_strnlen_user(str, count, max);  in strnlen_user()
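
The strnlen_user() fragments encode its return convention: 0 on a faulting access, a value greater than `count` (here `count + 1`) when no terminator was found within the limit, and otherwise the string length including the trailing NUL. A hedged caller sketch in kernel style; `copy_name_from_user()` is a made-up helper used only to show how the three outcomes are usually handled:

    #include <linux/uaccess.h>
    #include <linux/errno.h>

    static long copy_name_from_user(char *dst, const char __user *ustr, long limit)
    {
        long len = strnlen_user(ustr, limit);   /* includes the NUL on success */

        if (len == 0)
            return -EFAULT;                     /* faulted while scanning */
        if (len > limit)
            return -ENAMETOOLONG;               /* no NUL within 'limit' bytes */

        /* The string plus its terminating NUL fits in 'len' bytes of dst. */
        return strncpy_from_user(dst, ustr, len);
    }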
|
D | strncpy_from_user.c |
      29  unsigned long count, unsigned long max)  in do_strncpy_from_user() argument
      84  if (res >= count)  in do_strncpy_from_user()
     113  long strncpy_from_user(char *dst, const char __user *src, long count)  in strncpy_from_user() argument
     120  if (unlikely(count <= 0))  in strncpy_from_user()
     133  if (max > count)  in strncpy_from_user()
     134  max = count;  in strncpy_from_user()
     136  kasan_check_write(dst, count);  in strncpy_from_user()
     137  check_object_size(dst, count, false);  in strncpy_from_user()
     139  retval = do_strncpy_from_user(dst, src, count, max);  in strncpy_from_user()
|
D | iomap_copy.c |
      21  size_t count)  in __iowrite32_copy() argument
      25  const u32 *end = src + count;  in __iowrite32_copy()
      42  void __ioread32_copy(void *to, const void __iomem *from, size_t count)  in __ioread32_copy() argument
      46  const u32 __iomem *end = src + count;  in __ioread32_copy()
      65  size_t count)  in __iowrite64_copy() argument
      70  const u64 *end = src + count;  in __iowrite64_copy()
      75  __iowrite32_copy(to, from, count * 2);  in __iowrite64_copy()
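
__iowrite32_copy() and friends exist because MMIO windows generally require fixed-width, in-order accesses, so the copy is an explicit loop of 32-bit (or 64-bit) stores rather than a memcpy() that might use wider or overlapping transfers. A userspace sketch of the loop shape, with a volatile destination standing in for the kernel's __raw_writel():

    #include <stddef.h>
    #include <stdint.h>

    static void iowrite32_copy_sketch(volatile uint32_t *to, const uint32_t *from,
                                      size_t count)
    {
        const uint32_t *end = from + count;

        while (from < end)
            *to++ = *from++;        /* one 32-bit store per word, in order */
    }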
|
D | test_strscpy.c |
      29  static int __init tc(char *src, int count, int expected,  in tc() argument
      50  max_expected = count - 1; /* Space for the null */  in tc()
      51  if (count > max_count) {  in tc()
      52  pr_err("count (%d) is too big (%d) ... aborting", count, max_count);  in tc()
      60  written = strscpy_pad(buf, src, count);  in tc()
      66  if (count && written == -E2BIG) {  in tc()
      67  if (strncmp(buf, src, count - 1) != 0) {  in tc()
      71  if (buf[count - 1] != '\0') {  in tc()
      85  if (buf[count - 1] != '\0') {  in tc()
|
D | percpu-refcount.c |
      98  atomic_long_set(&data->count, start_count);  in percpu_ref_init()
     141  ref->percpu_count_ptr |= atomic_long_read(&ref->data->count) <<  in percpu_ref_exit()
     174  unsigned long count = 0;  in percpu_ref_switch_to_atomic_rcu() local
     178  count += *per_cpu_ptr(percpu_count, cpu);  in percpu_ref_switch_to_atomic_rcu()
     181  atomic_long_read(&data->count), count);  in percpu_ref_switch_to_atomic_rcu()
     195  atomic_long_add((long)count - PERCPU_COUNT_BIAS, &data->count);  in percpu_ref_switch_to_atomic_rcu()
     197  if (WARN_ONCE(atomic_long_read(&data->count) <= 0,  in percpu_ref_switch_to_atomic_rcu()
     199  data->release, atomic_long_read(&data->count)) &&  in percpu_ref_switch_to_atomic_rcu()
     250  atomic_long_add(PERCPU_COUNT_BIAS, &ref->data->count);  in __percpu_ref_switch_to_percpu()
     413  unsigned long count, flags;  in percpu_ref_is_zero() local
     [all …]
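
The percpu-refcount fragments revolve around PERCPU_COUNT_BIAS: while the ref is in per-CPU mode the shared atomic holds the bias plus the initial references, per-CPU counters absorb gets and puts, and percpu_ref_switch_to_atomic_rcu() folds the per-CPU sum in while subtracting the bias, leaving the true count. A plain arithmetic sketch of that bookkeeping, assuming one initial reference and an illustrative bias value (the real PERCPU_COUNT_BIAS is larger):

    #include <stdio.h>

    #define BIAS (1LL << 31)        /* stand-in for PERCPU_COUNT_BIAS */

    int main(void)
    {
        long long atomic_count = BIAS + 1;              /* init: bias + one ref */
        long long percpu_deltas[4] = { +3, -1, +2, -2 }; /* net gets/puts per CPU */
        long long sum = 0;

        for (int cpu = 0; cpu < 4; cpu++)
            sum += percpu_deltas[cpu];

        /* mirrors: atomic_long_add((long)count - PERCPU_COUNT_BIAS, &data->count) */
        atomic_count += sum - BIAS;

        printf("true refcount = %lld\n", atomic_count); /* prints 3 */
        return 0;
    }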
|
D | test_firmware.c |
     182  static int __kstrncpy(char **dst, const char *name, size_t count, gfp_t gfp)  in __kstrncpy() argument
     184  *dst = kstrndup(name, count, gfp);  in __kstrncpy()
     187  return count;  in __kstrncpy()
     219  const char *buf, size_t count)  in reset_store() argument
     236  ret = count;  in reset_store()
     297  const char *buf, size_t count)  in config_name_store() argument
     303  ret = __kstrncpy(&test_fw_config->name, buf, count, GFP_KERNEL);  in config_name_store()
     423  const char *buf, size_t count)  in config_num_requests_store() argument
     435  rc = __test_dev_config_update_u8(buf, count,  in config_num_requests_store()
     453  const char *buf, size_t count)  in config_into_buf_store() argument
     [all …]
|
D | oid_registry.c |
     136  int count;  in sprint_oid() local
     142  ret = count = snprintf(buffer, bufsize, "%u.%u", n / 40, n % 40);  in sprint_oid()
     143  if (count >= bufsize)  in sprint_oid()
     145  buffer += count;  in sprint_oid()
     146  bufsize -= count;  in sprint_oid()
     163  ret += count = snprintf(buffer, bufsize, ".%lu", num);  in sprint_oid()
     164  if (count >= bufsize)  in sprint_oid()
     166  buffer += count;  in sprint_oid()
     167  bufsize -= count;  in sprint_oid()
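
sprint_oid() uses the standard snprintf() chaining pattern: accumulate the would-be total in `ret`, bail out when a component no longer fits (snprintf() returning at least the remaining space), and advance buffer and bufsize together. (The first call prints two arcs at once because the leading OID byte packs them as n/40 and n%40.) A userspace sketch of the chaining pattern for a generic dotted string; the function name and error value are illustrative:

    #include <stddef.h>
    #include <stdio.h>

    /* Render a dotted sequence such as "1.2.840" into buf; returns the total
     * length produced, or -1 if buf was too small (mirroring -ENOBUFS). */
    static int sprint_dotted(char *buf, size_t bufsize,
                             const unsigned long *arcs, size_t n_arcs)
    {
        int ret = 0, count;
        size_t i;

        for (i = 0; i < n_arcs; i++) {
            count = snprintf(buf, bufsize, i ? ".%lu" : "%lu", arcs[i]);
            if (count < 0)
                return -1;              /* encoding error */
            ret += count;
            if ((size_t)count >= bufsize)
                return -1;              /* truncated: not enough room */
            buf += count;               /* advance past what was written */
            bufsize -= count;
        }
        return ret;
    }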
|
D | iov_iter.c |
     112  if (unlikely(i->count < n)) \
     113  n = i->count; \
     145  i->count -= n; \
     181  if (unlikely(bytes > i->count))  in copy_page_to_iter_iovec()
     182  bytes = i->count;  in copy_page_to_iter_iovec()
     250  i->count -= wanted - bytes;  in copy_page_to_iter_iovec()
     265  if (unlikely(bytes > i->count))  in copy_page_from_iter_iovec()
     266  bytes = i->count;  in copy_page_from_iter_iovec()
     334  i->count -= wanted - bytes;  in copy_page_from_iter_iovec()
     394  if (unlikely(bytes > i->count))  in copy_page_to_iter_pipe()
     [all …]
|
D | kstrtox.c |
     379  int kstrtobool_from_user(const char __user *s, size_t count, bool *res)  in kstrtobool_from_user() argument
     384  count = min(count, sizeof(buf) - 1);  in kstrtobool_from_user()
     385  if (copy_from_user(buf, s, count))  in kstrtobool_from_user()
     387  buf[count] = '\0';  in kstrtobool_from_user()
     393  int f(const char __user *s, size_t count, unsigned int base, type *res) \
     398  count = min(count, sizeof(buf) - 1); \
     399  if (copy_from_user(buf, s, count)) \
     401  buf[count] = '\0'; \
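
kstrtobool_from_user() and the kstrto*_from_user() macro all follow one shape: clamp the untrusted length to a small stack buffer, copy_from_user(), NUL-terminate, then hand off to the in-kernel parser. A userspace sketch of that bounded copy-then-parse pattern, with memcpy() standing in for copy_from_user() and strtoul() for the kernel parser; the buffer size here is just illustrative:

    #include <errno.h>
    #include <stdlib.h>
    #include <string.h>

    static int parse_ulong_bounded(const char *src, size_t count,
                                   unsigned int base, unsigned long *res)
    {
        char buf[64];                       /* small fixed on-stack buffer */
        char *end;

        if (count > sizeof(buf) - 1)
            count = sizeof(buf) - 1;        /* min(count, sizeof(buf) - 1) */
        memcpy(buf, src, count);            /* kernel: copy_from_user() */
        buf[count] = '\0';                  /* source may not be terminated */

        errno = 0;
        *res = strtoul(buf, &end, base);
        if (errno || end == buf)
            return -EINVAL;
        return 0;
    }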
|
D | kobject_uevent.c |
      62  static int kobject_action_type(const char *buf, size_t count,  in kobject_action_type() argument
      71  if (count && (buf[count-1] == '\n' || buf[count-1] == '\0'))  in kobject_action_type()
      72  count--;  in kobject_action_type()
      74  if (!count)  in kobject_action_type()
      77  args_start = strnchr(buf, count, ' ');  in kobject_action_type()
      82  count_first = count;  in kobject_action_type()
     114  static int kobject_action_args(const char *buf, size_t count,  in kobject_action_args() argument
     122  if (count && (buf[count - 1] == '\n' || buf[count - 1] == '\0'))  in kobject_action_args()
     123  count--;  in kobject_action_args()
     125  if (!count)  in kobject_action_args()
     [all …]
|
D | rbtree_test.c |
     165  int count;  in black_path_count() local
     166  for (count = 0; rb; rb = rb_parent(rb))  in black_path_count()
     167  count += !is_red(rb);  in black_path_count()
     168  return count;  in black_path_count()
     174  int count = 0;  in check_postorder_foreach() local
     176  count++;  in check_postorder_foreach()
     178  WARN_ON_ONCE(count != nr_nodes);  in check_postorder_foreach()
     184  int count = 0;  in check_postorder() local
     186  count++;  in check_postorder()
     188  WARN_ON_ONCE(count != nr_nodes);  in check_postorder()
     [all …]
|
D | list_sort.c |
      56  u8 count = 0;  in merge_final() local
      89  if (unlikely(!++count))  in merge_final()
     188  size_t count = 0; /* Count of pending */  in list_sort() local
     219  for (bits = count; bits & 1; bits >>= 1)  in list_sort()
     236  count++;  in list_sort()
|
D | argv_split.c |
      14  int count = 0;  in count_argc() local
      22  count++;  in count_argc()
      26  return count;  in count_argc()
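
count_argc() makes a single pass over the string and counts each transition from separator to non-separator as one argument, which argv_split() then uses to size the argv array. A userspace sketch of the same single-pass count, using isspace() as the separator test:

    #include <ctype.h>

    static int count_args(const char *str)
    {
        int count = 0;
        int in_word = 0;

        for (; *str; str++) {
            if (isspace((unsigned char)*str)) {
                in_word = 0;
            } else if (!in_word) {
                in_word = 1;
                count++;            /* first character of a new argument */
            }
        }
        return count;
    }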
|
D | test_kmod.c |
     566  const char *buf, size_t count)  in trigger_config_store() argument
     593  ret = count;  in trigger_config_store()
     604  static int __kstrncpy(char **dst, const char *name, size_t count, gfp_t gfp)  in __kstrncpy() argument
     606  *dst = kstrndup(name, count, gfp);  in __kstrncpy()
     609  return count;  in __kstrncpy()
     614  size_t count)  in config_copy_test_driver_name() argument
     616  return __kstrncpy(&config->test_driver, name, count, GFP_KERNEL);  in config_copy_test_driver_name()
     621  size_t count)  in config_copy_test_fs() argument
     623  return __kstrncpy(&config->test_fs, name, count, GFP_KERNEL);  in config_copy_test_fs()
     654  const char *buf, size_t count)  in config_test_driver_store() argument
     [all …]
|
/lib/zstd/ |
D | fse_compress.c |
     262  int count = normalizedCounter[charnum++];  in FSE_writeNCount_generic() local
     264  remaining -= count < 0 ? -count : count;  in FSE_writeNCount_generic()
     265  count++; /* +1 for extra accuracy */  in FSE_writeNCount_generic()
     266  if (count >= threshold)  in FSE_writeNCount_generic()
     267  count += max; /* [0..max[ [max..threshold[ (...) [threshold+max 2*threshold[ */  in FSE_writeNCount_generic()
     268  bitStream += count << bitCount;  in FSE_writeNCount_generic()
     270  bitCount -= (count < max);  in FSE_writeNCount_generic()
     271  previous0 = (count == 1);  in FSE_writeNCount_generic()
     324  size_t FSE_count_simple(unsigned *count, unsigned *maxSymbolValuePtr, const void *src, size_t srcSi…  in FSE_count_simple() argument
     331  memset(count, 0, (maxSymbolValue + 1) * sizeof(*count));  in FSE_count_simple()
     [all …]
|
D | huf_compress.c |
      92  U32 *count;  in HUF_compressWeights_wksp() local
     100  count = (U32 *)workspace + spaceUsed32;  in HUF_compressWeights_wksp()
     116  CHECK_V_F(maxCount, FSE_count_simple(count, &maxSymbolValue, weightTable, wtSize));  in HUF_compressWeights_wksp()
     124  CHECK_F(FSE_normalizeCount(norm, tableLog, count, wtSize, maxSymbolValue));  in HUF_compressWeights_wksp()
     284  U32 count;  member
     341  U32 const highTotal = huffNode[highPos].count;  in HUF_setMaxHeight()
     342  U32 const lowTotal = 2 * huffNode[lowPos].count;  in HUF_setMaxHeight()
     389  static void HUF_sort(nodeElt *huffNode, const U32 *count, U32 maxSymbolValue)  in HUF_sort() argument
     396  U32 r = BIT_highbit32(count[n] + 1);  in HUF_sort()
     404  U32 const c = count[n];  in HUF_sort()
     [all …]
|
D | entropy_common.c |
     119  int count;  in FSE_readNCount() local
     122  count = bitStream & (threshold - 1);  in FSE_readNCount()
     125  count = bitStream & (2 * threshold - 1);  in FSE_readNCount()
     126  if (count >= threshold)  in FSE_readNCount()
     127  count -= max;  in FSE_readNCount()
     131  count--; /* extra accuracy */  in FSE_readNCount()
     132  remaining -= count < 0 ? -count : count; /* -1 means +1 */  in FSE_readNCount()
     133  normalizedCounter[charnum++] = (short)count;  in FSE_readNCount()
     134  previous0 = !count;  in FSE_readNCount()
|
/lib/zlib_inflate/ |
D | inftrees.c |
      44  unsigned short count[MAXBITS+1]; /* number of codes of each length */  in zlib_inflate_table() local
      94  count[len] = 0;  in zlib_inflate_table()
      96  count[lens[sym]]++;  in zlib_inflate_table()
     101  if (count[max] != 0) break;  in zlib_inflate_table()
     113  if (count[min] != 0) break;  in zlib_inflate_table()
     120  left -= count[len];  in zlib_inflate_table()
     129  offs[len + 1] = offs[len] + count[len];  in zlib_inflate_table()
     240  if (--(count[len]) == 0) {  in zlib_inflate_table()
     258  left -= count[curr + drop];  in zlib_inflate_table()
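
zlib_inflate_table() starts canonical-Huffman table construction by tallying how many codes exist at each bit length into count[], then turning that histogram into per-length starting offsets offs[] with a running sum so symbols can be bucket-sorted by code length. A sketch of just those two passes; MAXBITS_SKETCH stands in for zlib's MAXBITS, and lens[] holds each symbol's code length:

    #define MAXBITS_SKETCH 15

    static void huff_length_histogram(const unsigned short *lens, unsigned codes,
                                      unsigned short count[MAXBITS_SKETCH + 1],
                                      unsigned short offs[MAXBITS_SKETCH + 1])
    {
        unsigned sym, len;

        for (len = 0; len <= MAXBITS_SKETCH; len++)
            count[len] = 0;
        for (sym = 0; sym < codes; sym++)
            count[lens[sym]]++;             /* number of codes of each length */

        /* offs[len] = index where the first symbol of that length goes once
         * symbols are sorted by increasing code length. */
        offs[1] = 0;
        for (len = 1; len < MAXBITS_SKETCH; len++)
            offs[len + 1] = offs[len] + count[len];
    }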
|
/lib/pldmfw/ |
D | pldmfw_private.h |
     165  #define pldm_for_each_desc_tlv(i, desc, start, count) \  argument
     167  (i) < (count); \
     199  #define pldm_for_each_record(i, record, start, count) \  argument
     201  (i) < (count); \
     233  #define pldm_for_each_component(i, component, start, count) \  argument
     235  (i) < (count); \
|
/lib/mpi/ |
D | mpi-bit.c |
     158  void mpi_rshift_limbs(MPI a, unsigned int count)  in mpi_rshift_limbs() argument
     164  if (count >= n) {  in mpi_rshift_limbs()
     169  for (i = 0; i < n - count; i++)  in mpi_rshift_limbs()
     170  ap[i] = ap[i+count];  in mpi_rshift_limbs()
     172  a->nlimbs -= count;  in mpi_rshift_limbs()
     250  void mpi_lshift_limbs(MPI a, unsigned int count)  in mpi_lshift_limbs() argument
     256  if (!count || !n)  in mpi_lshift_limbs()
     259  RESIZE_IF_NEEDED(a, n+count);  in mpi_lshift_limbs()
     263  ap[i+count] = ap[i];  in mpi_lshift_limbs()
     264  for (i = 0; i < count; i++)  in mpi_lshift_limbs()
     [all …]
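
mpi_rshift_limbs() and mpi_lshift_limbs() shift a multi-precision integer by whole limbs, which reduces to moving array elements and adjusting the limb count; the left shift copies from the top down so it can work in place, then zero-fills the vacated low limbs. A sketch on a plain little-endian limb array (least significant limb first); the caller is assumed to have room for n + count limbs before the left shift, as RESIZE_IF_NEEDED() guarantees in the kernel:

    #include <stddef.h>
    #include <stdint.h>

    typedef uint64_t limb_t;

    /* Shift right by 'count' whole limbs; returns the new limb count. */
    static size_t limbs_rshift(limb_t *limbs, size_t n, size_t count)
    {
        size_t i;

        if (count >= n)
            return 0;                       /* everything shifted out */
        for (i = 0; i < n - count; i++)
            limbs[i] = limbs[i + count];    /* move each limb down */
        return n - count;
    }

    /* Shift left by 'count' whole limbs; returns the new limb count. */
    static size_t limbs_lshift(limb_t *limbs, size_t n, size_t count)
    {
        size_t i;

        for (i = n; i-- > 0; )
            limbs[i + count] = limbs[i];    /* copy from the top down */
        for (i = 0; i < count; i++)
            limbs[i] = 0;                   /* zero-fill vacated low limbs */
        return n + count;
    }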
|
/lib/reed_solomon/ |
D | decode_rs.c |
      24  int count = 0;  variable
     199  count = 0; /* Number of roots of lambda(x) */
     217  root[count] = i;
     218  loc[count] = k;
     222  if (++count == deg_lambda)
     225  if (deg_lambda != count) {
     253  for (j = count - 1; j >= 0; j--) {
     291  for (j = 0; j < count; j++) {
     309  for (i = 0; i < count; i++) {
     317  for (i = 0; i < count; i++) {
|