| /include/linux/ |
| D | refcount.h |
    176  int old = refcount_read(r);   in __refcount_add_not_zero() local
    179  if (!old)   in __refcount_add_not_zero()
    181  } while (!atomic_try_cmpxchg_relaxed(&r->refs, &old, old + i));   in __refcount_add_not_zero()
    184  *oldp = old;   in __refcount_add_not_zero()
    186  if (unlikely(old < 0 || old + i < 0))   in __refcount_add_not_zero()
    189  return old;   in __refcount_add_not_zero()
    219  int old = refcount_read(r);   in __refcount_add_not_zero_limited_acquire() local
    222  if (!old)   in __refcount_add_not_zero_limited_acquire()
    225  if (i > limit - old) {   in __refcount_add_not_zero_limited_acquire()
    227  *oldp = old;   in __refcount_add_not_zero_limited_acquire()
    [all …]
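The refcount.h hits above are the saturation-aware add-if-not-zero path. As a hedged illustration of the same retry shape (the wrapper type and function name below are made up, not the real refcount_t internals):

	#include <linux/atomic.h>
	#include <linux/types.h>

	/* Illustrative wrapper; the real refcount_t also handles saturation. */
	struct my_ref {
		atomic_t refs;
	};

	static inline bool my_ref_add_not_zero(struct my_ref *r, int i)
	{
		int old = atomic_read(&r->refs);

		do {
			if (!old)		/* already hit zero: never resurrect */
				return false;
			/* on failure, old is refreshed with the current value */
		} while (!atomic_try_cmpxchg_relaxed(&r->refs, &old, old + i));

		return true;
	}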
|
| D | iversion.h |
    279  inode_eq_iversion_raw(const struct inode *inode, u64 old)   in inode_eq_iversion_raw() argument
    281  return inode_peek_iversion_raw(inode) == old;   in inode_eq_iversion_raw()
    296  inode_eq_iversion(const struct inode *inode, u64 old)   in inode_eq_iversion() argument
    298  return inode_peek_iversion(inode) == old;   in inode_eq_iversion()
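inode_eq_iversion() simply compares a previously sampled change counter against the inode's current one. A minimal usage sketch, assuming the caller sampled the counter earlier with inode_query_iversion() (the two helper names below are illustrative):

	#include <linux/iversion.h>

	/* Illustrative: remember the change counter, later test whether it moved. */
	static u64 sample_change_attr(struct inode *inode)
	{
		return inode_query_iversion(inode);	/* samples and marks it queried */
	}

	static bool changed_since(struct inode *inode, u64 old)
	{
		return !inode_eq_iversion(inode, old);
	}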
|
| D | rbtree_augmented.h |
    29   void (*copy)(struct rb_node *old, struct rb_node *new);
    30   void (*rotate)(struct rb_node *old, struct rb_node *new);
    34   void (*augment_rotate)(struct rb_node *old, struct rb_node *new));
    115  RBSTRUCT *old = rb_entry(rb_old, RBSTRUCT, RBFIELD); \
    117  new->RBAUGMENTED = old->RBAUGMENTED; \
    122  RBSTRUCT *old = rb_entry(rb_old, RBSTRUCT, RBFIELD); \
    124  new->RBAUGMENTED = old->RBAUGMENTED; \
    125  RBCOMPUTE(old, false); \
    195  __rb_change_child(struct rb_node *old, struct rb_node *new,   in __rb_change_child() argument
    199  if (parent->rb_left == old)   in __rb_change_child()
    [all …]
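The copy/rotate callbacks in those hits are normally generated by the RB_DECLARE_CALLBACKS* macros rather than written by hand. A hedged sketch, assuming a hypothetical node that caches the maximum of a per-node value across its subtree (all names below are illustrative):

	#include <linux/rbtree_augmented.h>

	/* Hypothetical augmented node: subtree_max caches max(val) over the subtree. */
	struct my_node {
		struct rb_node rb;
		u32 val;
		u32 subtree_max;
	};

	#define MY_NODE_VAL(node) ((node)->val)

	/* Generates my_augment.propagate/.copy/.rotate for this layout. */
	RB_DECLARE_CALLBACKS_MAX(static, my_augment,
				 struct my_node, rb, u32, subtree_max, MY_NODE_VAL)

	/*
	 * Insertions and removals must then go through the augmented entry points
	 * so the cached maxima stay valid, e.g.
	 * rb_insert_augmented(&n->rb, root, &my_augment) and
	 * rb_erase_augmented(&n->rb, root, &my_augment).
	 */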
|
| D | via-core.h |
    202  u8 old;   in via_write_reg_mask() local
    205  old = inb(port + 1);   in via_write_reg_mask()
    206  outb((data & mask) | (old & ~mask), port + 1);   in via_write_reg_mask()
    214  u8 old = inb(VIA_MISC_REG_READ);   in via_write_misc_reg_mask() local
    215  outb((data & mask) | (old & ~mask), VIA_MISC_REG_WRITE);   in via_write_misc_reg_mask()
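Both via helpers are the usual read-modify-write-under-mask shape: keep the bits outside the mask, merge in the new bits. A generic sketch of that merge (illustrative name, not part of the via API):

	#include <linux/types.h>

	/* Bits set in mask come from data; every other bit keeps its old value. */
	static inline u8 merge_under_mask(u8 old, u8 data, u8 mask)
	{
		return (data & mask) | (old & ~mask);
	}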
|
| D | page_owner.h |
    19   extern void __folio_copy_owner(struct folio *newfolio, struct folio *old);
    44   static inline void folio_copy_owner(struct folio *newfolio, struct folio *old)   in folio_copy_owner() argument
    47   __folio_copy_owner(newfolio, old);   in folio_copy_owner()
|
| D | cmpxchg-emu.h | 13 uintptr_t cmpxchg_emu_u8(volatile u8 *p, uintptr_t old, uintptr_t new);
|
| D | rculist.h |
    200  static inline void list_replace_rcu(struct list_head *old,   in list_replace_rcu() argument
    203  new->next = old->next;   in list_replace_rcu()
    204  new->prev = old->prev;   in list_replace_rcu()
    207  old->prev = LIST_POISON2;   in list_replace_rcu()
    529  static inline void hlist_replace_rcu(struct hlist_node *old,   in hlist_replace_rcu() argument
    532  struct hlist_node *next = old->next;   in hlist_replace_rcu()
    535  WRITE_ONCE(new->pprev, old->pprev);   in hlist_replace_rcu()
    539  WRITE_ONCE(old->pprev, LIST_POISON2);   in hlist_replace_rcu()
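list_replace_rcu() swaps one element for another while readers may still be walking the list, which is why the old element is poisoned and must only be freed after a grace period. A minimal usage sketch, assuming a hypothetical item type and an updater-side lock held by the caller:

	#include <linux/rculist.h>
	#include <linux/slab.h>

	struct item {
		struct list_head node;
		int payload;
		struct rcu_head rcu;
	};

	/* Caller holds the lock that serializes updates to this list. */
	static void replace_item(struct item *old, struct item *new)
	{
		new->payload = old->payload;
		list_replace_rcu(&old->node, &new->node);
		kfree_rcu(old, rcu);	/* readers may still hold 'old' until a grace period */
	}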
|
| /include/asm-generic/bitops/ |
| D | lock.h |
    21   long old;   in arch_test_and_set_bit_lock() local
    28   old = raw_atomic_long_fetch_or_acquire(mask, (atomic_long_t *)p);   in arch_test_and_set_bit_lock()
    29   return !!(old & mask);   in arch_test_and_set_bit_lock()
    61   unsigned long old;   in arch___clear_bit_unlock() local
    64   old = READ_ONCE(*p);   in arch___clear_bit_unlock()
    65   old &= ~BIT_MASK(nr);   in arch___clear_bit_unlock()
    66   raw_atomic_long_set_release((atomic_long_t *)p, old);   in arch___clear_bit_unlock()
    73   long old;   in arch_xor_unlock_is_negative_byte() local
    75   old = raw_atomic_long_fetch_xor_release(mask, (atomic_long_t *)p);   in arch_xor_unlock_is_negative_byte()
    76   return !!(old & BIT(7));   in arch_xor_unlock_is_negative_byte()
|
| D | atomic.h |
    38   long old;   in arch_test_and_set_bit() local
    42   old = raw_atomic_long_fetch_or(mask, (atomic_long_t *)p);   in arch_test_and_set_bit()
    43   return !!(old & mask);   in arch_test_and_set_bit()
    49   long old;   in arch_test_and_clear_bit() local
    53   old = raw_atomic_long_fetch_andnot(mask, (atomic_long_t *)p);   in arch_test_and_clear_bit()
    54   return !!(old & mask);   in arch_test_and_clear_bit()
    60   long old;   in arch_test_and_change_bit() local
    64   old = raw_atomic_long_fetch_xor(mask, (atomic_long_t *)p);   in arch_test_and_change_bit()
    65   return !!(old & mask);   in arch_test_and_change_bit()
|
| D | generic-non-atomic.h |
    77   unsigned long old = *p;   in generic___test_and_set_bit() local
    79   *p = old | mask;   in generic___test_and_set_bit()
    80   return (old & mask) != 0;   in generic___test_and_set_bit()
    97   unsigned long old = *p;   in generic___test_and_clear_bit() local
    99   *p = old & ~mask;   in generic___test_and_clear_bit()
    100  return (old & mask) != 0;   in generic___test_and_clear_bit()
    109  unsigned long old = *p;   in generic___test_and_change_bit() local
    111  *p = old ^ mask;   in generic___test_and_change_bit()
    112  return (old & mask) != 0;   in generic___test_and_change_bit()
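lock.h and atomic.h build the atomic test-and-set family on the raw_atomic_long fetch ops, while generic-non-atomic.h supplies the double-underscore variants that rely on external serialization. A small sketch contrasting the two (the flag word and bit number are illustrative):

	#include <linux/bitops.h>

	#define MY_BUSY_BIT	0		/* illustrative bit number */

	static unsigned long my_flags;

	/* Safe against concurrent callers: a full atomic read-modify-write. */
	static bool my_try_claim(void)
	{
		return !test_and_set_bit(MY_BUSY_BIT, &my_flags);
	}

	/* Non-atomic variant: only valid if callers are already serialized. */
	static bool my_try_claim_locked(void)
	{
		return !__test_and_set_bit(MY_BUSY_BIT, &my_flags);
	}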
|
| /include/asm-generic/ |
| D | cmpxchg-local.h |
    16   unsigned long old, unsigned long new, int size)   in __generic_cmpxchg_local() argument
    29   if (prev == (old & 0xffu))   in __generic_cmpxchg_local()
    33   if (prev == (old & 0xffffu))   in __generic_cmpxchg_local()
    37   if (prev == (old & 0xffffffffu))   in __generic_cmpxchg_local()
    41   if (prev == old)   in __generic_cmpxchg_local()
    55   u64 old, u64 new)   in __generic_cmpxchg64_local() argument
    62   if (prev == old)   in __generic_cmpxchg64_local()
|
| D | atomic.h |
    22   int c, old; \
    25   while ((old = arch_cmpxchg(&v->counter, c, c c_op i)) != c) \
    26   c = old; \
    32   int c, old; \
    35   while ((old = arch_cmpxchg(&v->counter, c, c c_op i)) != c) \
    36   c = old; \
    44   int c, old; \
    47   while ((old = arch_cmpxchg(&v->counter, c, c c_op i)) != c) \
    48   c = old; \
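Those asm-generic atomic.h fragments are the standard way to synthesize any fetch-op from cmpxchg: read the counter, try to install the combined value, and retry with whatever cmpxchg returned until it matches. The same loop written against the public API (illustrative, not the ATOMIC_OP macro itself):

	#include <linux/atomic.h>

	/* Illustrative: atomic OR built from a cmpxchg retry loop. */
	static inline void my_atomic_or(int i, atomic_t *v)
	{
		int c = atomic_read(v);
		int old;

		while ((old = atomic_cmpxchg(v, c, c | i)) != c)
			c = old;	/* lost the race; retry with the fresh value */
	}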
|
| D | spinlock.h |
    55   u32 old = atomic_read(lock);   in arch_spin_trylock() local
    57   if ((old >> 16) != (old & 0xffff))   in arch_spin_trylock()
    60   return atomic_try_cmpxchg(lock, &old, old + (1<<16)); /* SC, for RCsc */   in arch_spin_trylock()
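In the asm-generic ticket spinlock the lock word packs the next-ticket counter in the high 16 bits and the now-serving counter in the low 16 bits, so trylock only attempts the +1<<16 bump when the two halves match. A commented sketch of that decoding (illustrative helper, not part of the header):

	#include <linux/types.h>

	/*
	 * bits 31..16: next ticket to hand out (bumped by each acquirer)
	 * bits 15..0 : ticket currently being served
	 * The lock is free exactly when both halves are equal.
	 */
	static inline bool ticket_lock_is_free(u32 val)
	{
		return (val >> 16) == (val & 0xffff);
	}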
|
| D | local64.h |
    45   static inline s64 local64_cmpxchg(local64_t *l, s64 old, s64 new)   in local64_cmpxchg() argument
    47   return local_cmpxchg(&l->a, old, new);   in local64_cmpxchg()
    50   static inline bool local64_try_cmpxchg(local64_t *l, s64 *old, s64 new)   in local64_try_cmpxchg() argument
    52   return local_try_cmpxchg(&l->a, (long *)old, new);   in local64_try_cmpxchg()
|
| /include/linux/atomic/ |
| D | atomic-long.h |
    1362  raw_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)   in raw_atomic_long_cmpxchg() argument
    1365  return raw_atomic64_cmpxchg(v, old, new);   in raw_atomic_long_cmpxchg()
    1367  return raw_atomic_cmpxchg(v, old, new);   in raw_atomic_long_cmpxchg()
    1385  raw_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)   in raw_atomic_long_cmpxchg_acquire() argument
    1388  return raw_atomic64_cmpxchg_acquire(v, old, new);   in raw_atomic_long_cmpxchg_acquire()
    1390  return raw_atomic_cmpxchg_acquire(v, old, new);   in raw_atomic_long_cmpxchg_acquire()
    1408  raw_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)   in raw_atomic_long_cmpxchg_release() argument
    1411  return raw_atomic64_cmpxchg_release(v, old, new);   in raw_atomic_long_cmpxchg_release()
    1413  return raw_atomic_cmpxchg_release(v, old, new);   in raw_atomic_long_cmpxchg_release()
    1431  raw_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)   in raw_atomic_long_cmpxchg_relaxed() argument
    [all …]
|
| D | atomic-instrumented.h |
    1192  atomic_cmpxchg(atomic_t *v, int old, int new)   in atomic_cmpxchg() argument
    1196  return raw_atomic_cmpxchg(v, old, new);   in atomic_cmpxchg()
    1213  atomic_cmpxchg_acquire(atomic_t *v, int old, int new)   in atomic_cmpxchg_acquire() argument
    1216  return raw_atomic_cmpxchg_acquire(v, old, new);   in atomic_cmpxchg_acquire()
    1233  atomic_cmpxchg_release(atomic_t *v, int old, int new)   in atomic_cmpxchg_release() argument
    1237  return raw_atomic_cmpxchg_release(v, old, new);   in atomic_cmpxchg_release()
    1254  atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)   in atomic_cmpxchg_relaxed() argument
    1257  return raw_atomic_cmpxchg_relaxed(v, old, new);   in atomic_cmpxchg_relaxed()
    1275  atomic_try_cmpxchg(atomic_t *v, int *old, int new)   in atomic_try_cmpxchg() argument
    1279  instrument_atomic_read_write(old, sizeof(*old));   in atomic_try_cmpxchg()
    [all …]
|
| D | atomic-arch-fallback.h |
    2015  raw_atomic_cmpxchg(atomic_t *v, int old, int new)   in raw_atomic_cmpxchg() argument
    2018  return arch_atomic_cmpxchg(v, old, new);   in raw_atomic_cmpxchg()
    2022  ret = arch_atomic_cmpxchg_relaxed(v, old, new);   in raw_atomic_cmpxchg()
    2026  return raw_cmpxchg(&v->counter, old, new);   in raw_atomic_cmpxchg()
    2044  raw_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)   in raw_atomic_cmpxchg_acquire() argument
    2047  return arch_atomic_cmpxchg_acquire(v, old, new);   in raw_atomic_cmpxchg_acquire()
    2049  int ret = arch_atomic_cmpxchg_relaxed(v, old, new);   in raw_atomic_cmpxchg_acquire()
    2053  return arch_atomic_cmpxchg(v, old, new);   in raw_atomic_cmpxchg_acquire()
    2055  return raw_cmpxchg_acquire(&v->counter, old, new);   in raw_atomic_cmpxchg_acquire()
    2073  raw_atomic_cmpxchg_release(atomic_t *v, int old, int new)   in raw_atomic_cmpxchg_release() argument
    [all …]
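The _acquire/_release/_relaxed suffixes in these fallbacks pick the memory ordering of the exchange; when an architecture only provides the relaxed form, the fallback builds the stronger ones from it plus a fence. A hedged usage sketch of choosing orderings for a hand-off (the states and function names below are made up):

	#include <linux/atomic.h>

	#define ST_IDLE		0
	#define ST_READY	1
	#define ST_CLAIMED	2

	static atomic_t state = ATOMIC_INIT(ST_IDLE);

	/* Release: all stores before this call are visible to whoever claims. */
	static void publish(void)
	{
		atomic_cmpxchg_release(&state, ST_IDLE, ST_READY);
	}

	/* Acquire: on success, later loads see everything the publisher wrote. */
	static bool claim(void)
	{
		return atomic_cmpxchg_acquire(&state, ST_READY, ST_CLAIMED) == ST_READY;
	}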
|
| /include/trace/hooks/ |
| D | avc.h |
    23   TP_PROTO(const struct avc_node *old, const struct avc_node *new),
    24   TP_ARGS(old, new), 1);
|
| D | creds.h |
    28   TP_PROTO(const struct task_struct *task, const struct cred *old),
    29   TP_ARGS(task, old), 1);
|
| /include/trace/events/ |
| D | filemap.h |
    169  TP_PROTO(struct file *file, errseq_t old),
    171  TP_ARGS(file, old),
    177  __field(errseq_t, old)
    190  __entry->old = old;
    196  MINOR(__entry->s_dev), __entry->i_ino, __entry->old,
|
| D | power.h |
    527  TP_PROTO(bool grow, unsigned int new, unsigned int old),
    529  TP_ARGS(grow, new, old),
    534  __field(unsigned int, old)
    540  __entry->old = old;
    546  __entry->old)
    549  #define trace_guest_halt_poll_ns_grow(new, old) \   argument
    550  trace_guest_halt_poll_ns(true, new, old)
    551  #define trace_guest_halt_poll_ns_shrink(new, old) \   argument
    552  trace_guest_halt_poll_ns(false, new, old)
|
| D | kvm.h |
    349  unsigned int old),
    350  TP_ARGS(grow, vcpu_id, new, old),
    356  __field(unsigned int, old)
    363  __entry->old = old;
    370  __entry->old)
    373  #define trace_kvm_halt_poll_ns_grow(vcpu_id, new, old) \   argument
    374  trace_kvm_halt_poll_ns(true, vcpu_id, new, old)
    375  #define trace_kvm_halt_poll_ns_shrink(vcpu_id, new, old) \   argument
    376  trace_kvm_halt_poll_ns(false, vcpu_id, new, old)
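The power.h and kvm.h hits follow the usual TRACE_EVENT recipe: TP_PROTO/TP_ARGS give the tracepoint signature, __field reserves ring-buffer space, TP_fast_assign copies the arguments, and TP_printk formats them; the trace_*_grow/_shrink wrappers just fix the bool. A minimal hedged skeleton of an event carrying a new/old pair (the event, system, and header names are invented):

	/* Illustrative TRACE_EVENT skeleton; not an event that exists in the tree. */
	#undef TRACE_SYSTEM
	#define TRACE_SYSTEM sample

	#if !defined(_TRACE_SAMPLE_H) || defined(TRACE_HEADER_MULTI_READ)
	#define _TRACE_SAMPLE_H

	#include <linux/tracepoint.h>

	TRACE_EVENT(sample_poll_ns,
		TP_PROTO(bool grow, unsigned int new, unsigned int old),
		TP_ARGS(grow, new, old),

		TP_STRUCT__entry(
			__field(bool, grow)
			__field(unsigned int, new)
			__field(unsigned int, old)
		),

		TP_fast_assign(
			__entry->grow = grow;
			__entry->new = new;
			__entry->old = old;
		),

		TP_printk("%s: new %u old %u",
			  __entry->grow ? "grow" : "shrink",
			  __entry->new, __entry->old)
	);

	#endif /* _TRACE_SAMPLE_H */

	#include <trace/define_trace.h>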
|
| /include/net/sctp/ |
| D | checksum.h |
    55   __le32 old = sh->checksum;   in sctp_compute_cksum() local
    61   sh->checksum = old;   in sctp_compute_cksum()
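sctp_compute_cksum() saves the checksum field, zeroes it while CRC32c runs over the packet, then puts the saved value back so the buffer is left untouched. A simplified sketch of that save/zero/sum/restore shape over a flat buffer (illustrative types and names, not the skb-based helper itself):

	#include <linux/crc32c.h>
	#include <linux/types.h>
	#include <asm/byteorder.h>

	/* Illustrative header whose first field embeds its own checksum. */
	struct my_hdr {
		__le32 checksum;
		/* payload follows */
	};

	static __le32 my_compute_cksum(struct my_hdr *hdr, unsigned int len)
	{
		__le32 old = hdr->checksum;	/* remember what was there */
		u32 crc;

		hdr->checksum = 0;		/* field must be zero while summing */
		crc = ~crc32c(~(u32)0, hdr, len);
		hdr->checksum = old;		/* leave the buffer as we found it */

		return cpu_to_le32(crc);
	}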
|
| /include/media/ |
| D | v4l2-event.h |
    49   void (*replace)(struct v4l2_event *old, const struct v4l2_event *new);
    50   void (*merge)(const struct v4l2_event *old, struct v4l2_event *new);
|
| /include/net/ |
| D | netevent.h | 20 struct dst_entry *old; member
|