
Searched refs:new (Results 1 – 22 of 22) sorted by relevance

/lib/
refcount.c
66 unsigned int new, val = atomic_read(&r->refs); in refcount_add_not_zero_checked() local
75 new = val + i; in refcount_add_not_zero_checked()
76 if (new < val) in refcount_add_not_zero_checked()
77 new = UINT_MAX; in refcount_add_not_zero_checked()
79 } while (!atomic_try_cmpxchg_relaxed(&r->refs, &val, new)); in refcount_add_not_zero_checked()
81 WARN_ONCE(new == UINT_MAX, "refcount_t: saturated; leaking memory.\n"); in refcount_add_not_zero_checked()
123 unsigned int new, val = atomic_read(&r->refs); in refcount_inc_not_zero_checked() local
126 new = val + 1; in refcount_inc_not_zero_checked()
131 if (unlikely(!new)) in refcount_inc_not_zero_checked()
134 } while (!atomic_try_cmpxchg_relaxed(&r->refs, &val, new)); in refcount_inc_not_zero_checked()
[all …]
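
The loop above is the kernel's saturating reference count: retry the compare-and-swap until it lands, and clamp at UINT_MAX instead of wrapping. A minimal userspace sketch of the same pattern, using C11 atomics in place of atomic_try_cmpxchg_relaxed(); the sat_refcount type and function name are made up for illustration:

#include <limits.h>
#include <stdatomic.h>
#include <stdbool.h>

typedef struct { _Atomic unsigned int refs; } sat_refcount;

/* Increment unless the count is zero; saturate at UINT_MAX on overflow. */
static bool sat_inc_not_zero(sat_refcount *r)
{
        unsigned int val = atomic_load_explicit(&r->refs, memory_order_relaxed);
        unsigned int new;

        do {
                if (val == 0)
                        return false;   /* object is already dead */
                new = val + 1;
                if (new < val)
                        new = UINT_MAX; /* saturate rather than wrap */
        } while (!atomic_compare_exchange_weak_explicit(&r->refs, &val, new,
                        memory_order_relaxed, memory_order_relaxed));

        return true;
}
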
errseq.c
78 errseq_t new; in errseq_set() local
81 new = (old & ~(MAX_ERRNO|ERRSEQ_SEEN)) | -err; in errseq_set()
85 new += ERRSEQ_CTR_INC; in errseq_set()
88 if (new == old) { in errseq_set()
89 cur = new; in errseq_set()
94 cur = cmpxchg(eseq, old, new); in errseq_set()
100 if (likely(cur == old || cur == new)) in errseq_set()
177 errseq_t old, new; in errseq_check_and_advance() local
198 new = old | ERRSEQ_SEEN; in errseq_check_and_advance()
199 if (new != old) in errseq_check_and_advance()
[all …]
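
errseq_set() packs a negative errno, a "seen" flag, and a change counter into one word, bumping the counter only when the previous error had already been observed. A userspace sketch of that update, with the bit layout restated here as an assumption (errno in the low 12 bits, SEEN at bit 12, counter above):

#include <stdatomic.h>

#define MAX_ERRNO       4095
#define ERRSEQ_SEEN     (1u << 12)      /* someone sampled this error */
#define ERRSEQ_CTR_INC  (1u << 13)      /* counter lives above the flag */

typedef unsigned int errseq_t;

/* err is a negative errno, as in the kernel interface. */
static void errseq_set_sketch(_Atomic errseq_t *eseq, int err)
{
        errseq_t old = atomic_load_explicit(eseq, memory_order_relaxed);
        errseq_t new;

        do {
                /* Swap in the new errno and clear the SEEN bit... */
                new = (old & ~(MAX_ERRNO | ERRSEQ_SEEN)) | -err;
                /* ...bumping the counter only if the old value was seen. */
                if (old & ERRSEQ_SEEN)
                        new += ERRSEQ_CTR_INC;
                if (new == old)
                        return;         /* no observable change */
        } while (!atomic_compare_exchange_weak_explicit(eseq, &old, new,
                        memory_order_relaxed, memory_order_relaxed));
}
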
lockref.c
17 struct lockref new = old, prev = old; \
21 new.lock_count); \
47 new.count++; in lockref_get()
68 new.count++; in lockref_get_not_zero()
96 new.count--; in lockref_put_not_zero()
123 new.count++; in lockref_get_or_lock()
149 new.count--; in lockref_put_return()
153 return new.count; in lockref_put_return()
167 new.count--; in lockref_put_or_lock()
204 new.count++; in lockref_get_not_dead()
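
lockref keeps a spinlock and a reference count in one machine word, so the common "take a reference" case is a single compare-and-swap with no lock traffic. A userspace sketch of the fast path of lockref_get_not_zero(); the 32/32 packing and names here are assumptions, not the kernel layout:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

struct lockref_sketch {
        _Atomic uint64_t lock_count;    /* low 32 bits: lock word, high 32: count */
};

#define LOCK_MASK       0xffffffffull
#define COUNT_ONE       (1ull << 32)

static bool lockref_get_not_zero_sketch(struct lockref_sketch *lr)
{
        uint64_t old = atomic_load_explicit(&lr->lock_count,
                                            memory_order_relaxed);

        /* Unlocked and count > 0: bump the count with one CAS. */
        while (!(old & LOCK_MASK) && (old >> 32)) {
                if (atomic_compare_exchange_weak_explicit(&lr->lock_count,
                                &old, old + COUNT_ONE,
                                memory_order_acquire, memory_order_relaxed))
                        return true;
        }
        return false;   /* locked or dead; the real code falls back to the spinlock */
}
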
rbtree_test.c
34 struct rb_node **new = &root->rb_root.rb_node, *parent = NULL; in insert() local
37 while (*new) { in insert()
38 parent = *new; in insert()
40 new = &parent->rb_left; in insert()
42 new = &parent->rb_right; in insert()
45 rb_link_node(&node->rb, parent, new); in insert()
51 struct rb_node **new = &root->rb_root.rb_node, *parent = NULL; in insert_cached() local
55 while (*new) { in insert_cached()
56 parent = *new; in insert_cached()
58 new = &parent->rb_left; in insert_cached()
[all …]
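
Both insert routines use the standard rbtree descent: walk a pointer to the child slot ("new") so the final link can be patched without a second traversal, then let rb_insert_color() rebalance. A sketch with a plain rb_root and a made-up keyed node type:

#include <linux/rbtree.h>

struct my_node {
        struct rb_node rb;
        unsigned long key;
};

static void my_insert(struct rb_root *root, struct my_node *node)
{
        struct rb_node **new = &root->rb_node, *parent = NULL;

        while (*new) {
                struct my_node *this = rb_entry(*new, struct my_node, rb);

                parent = *new;
                if (node->key < this->key)
                        new = &parent->rb_left;
                else
                        new = &parent->rb_right;
        }

        rb_link_node(&node->rb, parent, new);   /* splice in the new leaf */
        rb_insert_color(&node->rb, root);       /* restore red-black invariants */
}
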
memcat_p.c
12 void **p = a, **new; in __memcat_p() local
23 new = kmalloc_array(nr, sizeof(void *), GFP_KERNEL); in __memcat_p()
24 if (!new) in __memcat_p()
29 new[nr] = *p; in __memcat_p()
31 return new; in __memcat_p()
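
__memcat_p() joins two NULL-terminated arrays of pointers into one newly allocated, NULL-terminated array. A userspace equivalent with malloc standing in for kmalloc_array():

#include <stdlib.h>

static void **memcat_p_sketch(void **a, void **b)
{
        void **p, **new;
        size_t nr = 1;                  /* one slot for the trailing NULL */

        for (p = a; *p; p++)
                nr++;
        for (p = b; *p; p++)
                nr++;

        new = malloc(nr * sizeof(void *));
        if (!new)
                return NULL;

        nr = 0;
        for (p = a; *p; p++)
                new[nr++] = *p;
        for (p = b; *p; p++)
                new[nr++] = *p;
        new[nr] = NULL;                 /* keep the terminator convention */
        return new;
}
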
rbtree.c
75 __rb_rotate_set_parents(struct rb_node *old, struct rb_node *new, in __rb_rotate_set_parents() argument
79 new->__rb_parent_color = old->__rb_parent_color; in __rb_rotate_set_parents()
80 rb_set_parent_color(old, new, color); in __rb_rotate_set_parents()
81 __rb_change_child(old, new, parent, root); in __rb_rotate_set_parents()
86 void (*augment_rotate)(struct rb_node *old, struct rb_node *new)) in __rb_insert() argument
228 void (*augment_rotate)(struct rb_node *old, struct rb_node *new)) in ____rb_erase_color() argument
411 void (*augment_rotate)(struct rb_node *old, struct rb_node *new)) in __rb_erase_color() argument
425 static inline void dummy_copy(struct rb_node *old, struct rb_node *new) {} in dummy_copy() argument
426 static inline void dummy_rotate(struct rb_node *old, struct rb_node *new) {} in dummy_rotate() argument
457 void (*augment_rotate)(struct rb_node *old, struct rb_node *new)) in __rb_insert_augmented() argument
[all …]
list_debug.c
20 bool __list_add_valid(struct list_head *new, struct list_head *prev, in __list_add_valid() argument
29 CHECK_DATA_CORRUPTION(new == prev || new == next, in __list_add_valid()
31 new, prev, next)) in __list_add_valid()
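
__list_add_valid() sanity-checks an insertion before it happens: the two neighbours must point at each other, and the node being added must not already be one of them. The same checks in a standalone sketch, with plain asserts standing in for CHECK_DATA_CORRUPTION() reporting:

#include <assert.h>
#include <stdbool.h>

struct list_head { struct list_head *next, *prev; };

static bool list_add_valid_sketch(struct list_head *new,
                                  struct list_head *prev,
                                  struct list_head *next)
{
        assert(next->prev == prev);             /* neighbours are consistent */
        assert(prev->next == next);
        assert(new != prev && new != next);     /* no double add */
        return true;
}
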
crc-t10dif.c
26 struct crypto_shash *new, *old; in crc_t10dif_rehash() local
40 new = crypto_alloc_shash("crct10dif", 0, 0); in crc_t10dif_rehash()
41 if (IS_ERR(new)) { in crc_t10dif_rehash()
45 rcu_assign_pointer(crct10dif_tfm, new); in crc_t10dif_rehash()
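
crc_t10dif_rehash() swaps in a new transform with rcu_assign_pointer() so readers never see a half-built object. The general update-side shape of that pattern, with a hypothetical config object instead of the crypto tfm; the caller is assumed to serialize updates:

#include <linux/rcupdate.h>
#include <linux/slab.h>

struct cfg { int value; };

static struct cfg __rcu *active_cfg;

static void replace_cfg(struct cfg *new)
{
        struct cfg *old;

        old = rcu_dereference_protected(active_cfg, 1); /* updates serialized */
        rcu_assign_pointer(active_cfg, new);            /* readers now see new */
        if (old) {
                synchronize_rcu();                      /* wait out current readers */
                kfree(old);
        }
}
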
stackdepot.c
272 struct stack_record *new = in stack_depot_save() local
275 if (new) { in stack_depot_save()
276 new->next = *bucket; in stack_depot_save()
281 smp_store_release(bucket, new); in stack_depot_save()
282 found = new; in stack_depot_save()
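
stack_depot_save() links a new record into its hash bucket and publishes it with smp_store_release(), so lock-free readers doing an acquire load of the bucket head never see a half-initialized record. The same publication step in a C11 userspace sketch:

#include <stdatomic.h>

struct stack_record {
        struct stack_record *next;      /* hash-bucket chain */
        unsigned long hash;
        /* ... stack trace payload ... */
};

/* Publish a fully initialized record at the head of a bucket. */
static void publish(_Atomic(struct stack_record *) *bucket,
                    struct stack_record *new)
{
        new->next = atomic_load_explicit(bucket, memory_order_relaxed);
        /* Release pairs with the readers' acquire load of *bucket. */
        atomic_store_explicit(bucket, new, memory_order_release);
}
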
atomic64_test.c
79 #define XCHG_FAMILY_TEST(bit, init, new) \ argument
81 FAMILY_TEST(TEST_ARGS, bit, xchg, init, init, new, new); \
84 #define CMPXCHG_FAMILY_TEST(bit, init, new, wrong) \ argument
87 init, init, new, init, new); \
89 init, init, init, wrong, new); \
test_kmod.c
880 unsigned long new; in test_dev_config_update_uint_sync() local
883 ret = kstrtoul(buf, 10, &new); in test_dev_config_update_uint_sync()
887 if (new > UINT_MAX) in test_dev_config_update_uint_sync()
893 *(unsigned int *)config = new; in test_dev_config_update_uint_sync()
918 unsigned long new; in test_dev_config_update_uint_range() local
920 ret = kstrtoul(buf, 10, &new); in test_dev_config_update_uint_range()
924 if (new < min || new > max) in test_dev_config_update_uint_range()
928 *config = new; in test_dev_config_update_uint_range()
940 long new; in test_dev_config_update_int() local
942 ret = kstrtol(buf, 10, &new); in test_dev_config_update_int()
[all …]
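
All of these sysfs update helpers (and test_firmware's below) share one shape: parse into a wide type with kstrto*(), reject values outside the target's range, then store into the narrower config field. Distilled into one hypothetical helper, not a function from test_kmod.c itself:

#include <linux/errno.h>
#include <linux/kernel.h>

static int update_uint_bounded(const char *buf, unsigned int *config,
                               unsigned int min, unsigned int max)
{
        unsigned long new;
        int ret;

        ret = kstrtoul(buf, 10, &new);
        if (ret)
                return ret;             /* malformed number */
        if (new < min || new > max)
                return -EINVAL;         /* outside the allowed range */

        *config = new;
        return 0;
}
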
debugobjects.c
162 struct debug_obj *new[ODEBUG_BATCH_SIZE]; in fill_pool() local
166 new[cnt] = kmem_cache_zalloc(obj_cache, gfp); in fill_pool()
167 if (!new[cnt]) in fill_pool()
175 hlist_add_head(&new[--cnt]->node, &obj_pool); in fill_pool()
1295 struct debug_obj *obj, *new; in debug_objects_replace_static_objects() local
1323 new = hlist_entry(obj_pool.first, typeof(*obj), node); in debug_objects_replace_static_objects()
1324 hlist_del(&new->node); in debug_objects_replace_static_objects()
1326 *new = *obj; in debug_objects_replace_static_objects()
1327 hlist_add_head(&new->node, &db->list); in debug_objects_replace_static_objects()
bitmap.c
792 const unsigned long *old, const unsigned long *new, in bitmap_remap() argument
801 w = bitmap_weight(new, nbits); in bitmap_remap()
808 set_bit(bitmap_ord_to_pos(new, n % w, nbits), dst); in bitmap_remap()
839 const unsigned long *new, int bits) in bitmap_bitremap() argument
841 int w = bitmap_weight(new, bits); in bitmap_bitremap()
846 return bitmap_ord_to_pos(new, n % w, bits); in bitmap_bitremap()
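
bitmap_remap() treats @old and @new as a mapping by ordinal rank: the n-th set bit of @old maps to the n-th set bit of @new (modulo the weight of @new), and positions outside @old map to themselves. A small usage sketch with made-up values:

#include <linux/bitmap.h>

static void remap_example(void)
{
        DECLARE_BITMAP(src, 16);
        DECLARE_BITMAP(dst, 16);
        DECLARE_BITMAP(old, 16);
        DECLARE_BITMAP(new, 16);

        bitmap_zero(src, 16);
        bitmap_zero(dst, 16);
        bitmap_zero(old, 16);
        bitmap_zero(new, 16);

        set_bit(4, old);  set_bit(6, old);      /* old domain: {4, 6} */
        set_bit(12, new); set_bit(13, new);     /* new range: {12, 13} */
        set_bit(6, src);                        /* 6 is the 2nd set bit of old... */

        bitmap_remap(dst, src, old, new, 16);
        /* ...so dst now has bit 13 set, the 2nd set bit of new. */
}
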
btree.c
471 unsigned long *new; in btree_insert_level() local
473 new = btree_node_alloc(head, gfp); in btree_insert_level()
474 if (!new) in btree_insert_level()
478 new, level + 1, gfp); in btree_insert_level()
480 mempool_free(new, head->mempool); in btree_insert_level()
484 setkey(geo, new, i, bkey(geo, node, i)); in btree_insert_level()
485 setval(geo, new, i, bval(geo, node, i)); in btree_insert_level()
atomic64.c
163 s64 atomic64_xchg(atomic64_t *v, s64 new) in atomic64_xchg() argument
171 v->counter = new; in atomic64_xchg()
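
On machines without native 64-bit atomics, lib/atomic64.c falls back to spinlock protection (a hashed array of locks in the real code). The shape of that generic atomic64_xchg(), sketched with a single lock:

#include <linux/spinlock.h>
#include <linux/types.h>

typedef struct { s64 counter; } atomic64_sketch_t;

static DEFINE_SPINLOCK(atomic64_sketch_lock);

static s64 atomic64_xchg_sketch(atomic64_sketch_t *v, s64 new)
{
        unsigned long flags;
        s64 val;

        spin_lock_irqsave(&atomic64_sketch_lock, flags);
        val = v->counter;       /* return the old value... */
        v->counter = new;       /* ...after installing the new one */
        spin_unlock_irqrestore(&atomic64_sketch_lock, flags);
        return val;
}
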
test_firmware.c
339 long new; in test_dev_config_update_u8() local
341 ret = kstrtol(buf, 10, &new); in test_dev_config_update_u8()
345 if (new > U8_MAX) in test_dev_config_update_u8()
349 *(u8 *)cfg = new; in test_dev_config_update_u8()
lru_cache.c
66 unsigned long old, new, val; in lc_try_lock()
69 new = old | LC_LOCKED; in lc_try_lock()
70 val = cmpxchg(&lc->flags, old, new); in lc_try_lock()
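
lc_try_lock() acquires a flag bit with a cmpxchg loop rather than a spinlock. A userspace sketch of that try-lock; the LC_LOCKED bit value here is an assumption:

#include <stdatomic.h>
#include <stdbool.h>

#define LC_LOCKED       (1u << 0)       /* assumed flag bit */

static bool try_lock_flags(_Atomic unsigned int *flags)
{
        unsigned int old = atomic_load_explicit(flags, memory_order_relaxed);

        do {
                if (old & LC_LOCKED)
                        return false;   /* already held */
        } while (!atomic_compare_exchange_weak_explicit(flags, &old,
                        old | LC_LOCKED,
                        memory_order_acquire, memory_order_relaxed));
        return true;
}
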
iov_iter.c
1596 const void *dup_iter(struct iov_iter *new, struct iov_iter *old, gfp_t flags) in dup_iter() argument
1598 *new = *old; in dup_iter()
1599 if (unlikely(iov_iter_is_pipe(new))) { in dup_iter()
1603 if (unlikely(iov_iter_is_discard(new))) in dup_iter()
1605 if (iov_iter_is_bvec(new)) in dup_iter()
1606 return new->bvec = kmemdup(new->bvec, in dup_iter()
1607 new->nr_segs * sizeof(struct bio_vec), in dup_iter()
1611 return new->iov = kmemdup(new->iov, in dup_iter()
1612 new->nr_segs * sizeof(struct iovec), in dup_iter()
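
dup_iter() clones the struct with a plain assignment, then deep-copies the segment array with kmemdup() so the duplicate owns its own backing storage. The same two-step idea on a made-up holder type, not the real iov_iter layout:

#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uio.h>

struct vec_holder {
        struct iovec *iov;
        unsigned long nr_segs;
};

static int dup_holder(struct vec_holder *new, const struct vec_holder *old,
                      gfp_t flags)
{
        *new = *old;                    /* shallow copy of the struct... */
        new->iov = kmemdup(old->iov,    /* ...then own the segment array */
                           old->nr_segs * sizeof(struct iovec), flags);
        return new->iov ? 0 : -ENOMEM;
}
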
string.c
1084 char *strreplace(char *s, char old, char new) in strreplace() argument
1088 *s = new; in strreplace()
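
strreplace() swaps every occurrence of one character in place and returns a pointer to the string's terminating NUL. A userspace equivalent:

static char *strreplace_sketch(char *s, char old, char new)
{
        for (; *s; s++)
                if (*s == old)
                        *s = new;
        return s;       /* points at the trailing '\0' */
}
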
Kconfig.kgdb
38 intended for the development of new kgdb stubs
Kconfig.debug
1881 stress and performance analysis. So, any new change for vmalloc
/lib/math/
prime_numbers.c
112 struct primes *new; in expand_to_next_prime() local
128 new = kmalloc(sizeof(*new) + bitmap_size(sz), in expand_to_next_prime()
130 if (!new) in expand_to_next_prime()
136 kfree(new); in expand_to_next_prime()
144 bitmap_fill(new->primes, sz); in expand_to_next_prime()
145 bitmap_copy(new->primes, p->primes, p->sz); in expand_to_next_prime()
146 for (y = 2UL; y < sz; y = find_next_bit(new->primes, sz, y + 1)) in expand_to_next_prime()
147 new->last = clear_multiples(y, new->primes, p->sz, sz); in expand_to_next_prime()
148 new->sz = sz; in expand_to_next_prime()
150 BUG_ON(new->last <= x); in expand_to_next_prime()
[all …]
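
expand_to_next_prime() grows a sieve of Eratosthenes: allocate a larger bitmap, copy the old results, clear multiples of each surviving prime, and track the largest prime found. The core sieve step in a userspace sketch, with a byte array standing in for the kernel bitmap helpers:

#include <string.h>

/* Assumes sz >= 3. Returns the largest prime below sz. */
static unsigned long sieve(unsigned char *is_prime, unsigned long sz)
{
        unsigned long y, m, last = 2;

        memset(is_prime, 1, sz);
        is_prime[0] = is_prime[1] = 0;

        for (y = 2; y < sz; y++) {
                if (!is_prime[y])
                        continue;
                last = y;                       /* largest prime so far */
                for (m = 2 * y; m < sz; m += y)
                        is_prime[m] = 0;        /* clear the multiples */
        }
        return last;
}
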