/include/asm-generic/ |
D | mutex-xchg.h |
    26  __mutex_fastpath_lock(atomic_t *count, void (*fail_fn)(atomic_t *))  in __mutex_fastpath_lock()  argument
    28  if (unlikely(atomic_xchg(count, 0) != 1))  in __mutex_fastpath_lock()
    34  if (likely(atomic_xchg(count, -1) != 1))  in __mutex_fastpath_lock()
    35  fail_fn(count);  in __mutex_fastpath_lock()
    47  __mutex_fastpath_lock_retval(atomic_t *count)  in __mutex_fastpath_lock_retval()  argument
    49  if (unlikely(atomic_xchg(count, 0) != 1))  in __mutex_fastpath_lock_retval()
    50  if (likely(atomic_xchg(count, -1) != 1))  in __mutex_fastpath_lock_retval()
    68  __mutex_fastpath_unlock(atomic_t *count, void (*fail_fn)(atomic_t *))  in __mutex_fastpath_unlock()  argument
    70  if (unlikely(atomic_xchg(count, 1) != 0))  in __mutex_fastpath_unlock()
    71  fail_fn(count);  in __mutex_fastpath_unlock()
    [all …]
|
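The mutex-xchg.h fragments above implement the lock/unlock fastpath with a single atomic exchange: count is 1 when the mutex is free, 0 when held, and negative once contention has been recorded. Below is a minimal userspace sketch of that idea using C11 atomics; the toy_* names are invented for illustration, and the spinning slowpath only stands in for the real fail_fn(), which queues the task and sleeps.

/* Userspace analogue of the mutex-xchg.h fastpath: 1 = unlocked,
 * 0 = locked, -1 = locked and marked contended.  Illustration only. */
#include <stdatomic.h>
#include <stdio.h>

struct toy_mutex { atomic_int count; };

static void toy_lock_slowpath(struct toy_mutex *m)
{
    /* Stand-in for fail_fn(): keep marking the count contended (-1)
     * until an exchange observes the old value 1 (lock became free). */
    while (atomic_exchange(&m->count, -1) != 1)
        ;
}

static void toy_mutex_lock(struct toy_mutex *m)
{
    if (atomic_exchange(&m->count, 0) != 1)   /* fastpath failed */
        toy_lock_slowpath(m);                 /* fail_fn(count) in the header */
}

static void toy_mutex_unlock(struct toy_mutex *m)
{
    if (atomic_exchange(&m->count, 1) != 0) {
        /* old value was not 0: the kernel would wake a waiter here */
    }
}

int main(void)
{
    struct toy_mutex m = { 1 };               /* starts unlocked */

    toy_mutex_lock(&m);
    printf("held, count = %d\n", atomic_load(&m.count));
    toy_mutex_unlock(&m);
    printf("released, count = %d\n", atomic_load(&m.count));
    return 0;
}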
D | mutex-dec.h |
    21  __mutex_fastpath_lock(atomic_t *count, void (*fail_fn)(atomic_t *))  in __mutex_fastpath_lock()  argument
    23  if (unlikely(atomic_dec_return(count) < 0))  in __mutex_fastpath_lock()
    24  fail_fn(count);  in __mutex_fastpath_lock()
    36  __mutex_fastpath_lock_retval(atomic_t *count)  in __mutex_fastpath_lock_retval()  argument
    38  if (unlikely(atomic_dec_return(count) < 0))  in __mutex_fastpath_lock_retval()
    57  __mutex_fastpath_unlock(atomic_t *count, void (*fail_fn)(atomic_t *))  in __mutex_fastpath_unlock()  argument
    59  if (unlikely(atomic_inc_return(count) <= 0))  in __mutex_fastpath_unlock()
    60  fail_fn(count);  in __mutex_fastpath_unlock()
    81  __mutex_fastpath_trylock(atomic_t *count, int (*fail_fn)(atomic_t *))  in __mutex_fastpath_trylock()  argument
    83  if (likely(atomic_cmpxchg(count, 1, 0) == 1))  in __mutex_fastpath_trylock()
|
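For contrast, mutex-dec.h drives the same 1/0/negative convention with atomic decrement and increment instead of exchange. A hedged sketch of just the fastpath checks follows; the slowpath bodies are left as comments and the toy_dec_* names are invented.

/* Userspace sketch of the mutex-dec.h fastpath shape.  Only the
 * fastpath tests are modelled; fail_fn() handling is elided. */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

struct toy_dec_mutex { atomic_int count; };   /* 1 unlocked, 0 locked, <0 contended */

static bool toy_dec_trylock(struct toy_dec_mutex *m)
{
    int expected = 1;
    /* mirrors atomic_cmpxchg(count, 1, 0) == 1 */
    return atomic_compare_exchange_strong(&m->count, &expected, 0);
}

static void toy_dec_lock(struct toy_dec_mutex *m)
{
    if (atomic_fetch_sub(&m->count, 1) - 1 < 0) {
        /* atomic_dec_return(count) < 0: the kernel calls fail_fn() to sleep */
    }
}

static void toy_dec_unlock(struct toy_dec_mutex *m)
{
    if (atomic_fetch_add(&m->count, 1) + 1 <= 0) {
        /* atomic_inc_return(count) <= 0: the kernel calls fail_fn() to wake */
    }
}

int main(void)
{
    struct toy_dec_mutex m = { 1 };

    printf("trylock: %d\n", toy_dec_trylock(&m));   /* 1: got it */
    toy_dec_unlock(&m);
    toy_dec_lock(&m);
    printf("count while held: %d\n", atomic_load(&m.count));
    toy_dec_unlock(&m);
    return 0;
}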
D | mutex-null.h |
    13  #define __mutex_fastpath_lock(count, fail_fn) fail_fn(count)  argument
    14  #define __mutex_fastpath_lock_retval(count) (-1)  argument
    15  #define __mutex_fastpath_unlock(count, fail_fn) fail_fn(count)  argument
    16  #define __mutex_fastpath_trylock(count, fail_fn) fail_fn(count)  argument
|
D | ide_iops.h |
    8   static __inline__ void __ide_mm_insw(void __iomem *port, void *addr, u32 count)  in __ide_mm_insw()  argument
    10  while (count--) {  in __ide_mm_insw()
    16  static __inline__ void __ide_mm_insl(void __iomem *port, void *addr, u32 count)  in __ide_mm_insl()  argument
    18  while (count--) {  in __ide_mm_insl()
    24  static __inline__ void __ide_mm_outsw(void __iomem *port, void *addr, u32 count)  in __ide_mm_outsw()  argument
    26  while (count--) {  in __ide_mm_outsw()
    32  static __inline__ void __ide_mm_outsl(void __iomem * port, void *addr, u32 count)  in __ide_mm_outsl()  argument
    34  while (count--) {  in __ide_mm_outsl()
|
D | rwsem.h |
    36   if (unlikely(atomic_long_inc_return((atomic_long_t *)&sem->count) <= 0))  in __down_read()
    44   while ((tmp = sem->count) >= 0) {  in __down_read_trylock()
    45   if (tmp == cmpxchg(&sem->count, tmp,  in __down_read_trylock()
    61   (atomic_long_t *)&sem->count);  in __down_write_nested()
    75   tmp = cmpxchg(&sem->count, RWSEM_UNLOCKED_VALUE,  in __down_write_trylock()
    87   tmp = atomic_long_dec_return((atomic_long_t *)&sem->count);  in __up_read()
    98   (atomic_long_t *)&sem->count) < 0))  in __up_write()
    107  atomic_long_add(delta, (atomic_long_t *)&sem->count);  in rwsem_atomic_add()
    118  (atomic_long_t *)&sem->count);  in __downgrade_write()
    128  return atomic_long_add_return(delta, (atomic_long_t *)&sem->count);  in rwsem_atomic_update()
|
D | io.h |
    165  static inline void insb(unsigned long addr, void *buffer, int count)  in insb()  argument
    167  if (count) {  in insb()
    172  } while (--count);  in insb()
    178  static inline void insw(unsigned long addr, void *buffer, int count)  in insw()  argument
    180  if (count) {  in insw()
    185  } while (--count);  in insw()
    191  static inline void insl(unsigned long addr, void *buffer, int count)  in insl()  argument
    193  if (count) {  in insl()
    198  } while (--count);  in insl()
    204  static inline void outsb(unsigned long addr, const void *buffer, int count)  in outsb()  argument
    [all …]
|
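The insb()/insw()/insl() fragments above all share one loop shape: skip the loop entirely for a zero count, otherwise use do/while (--count) so the body runs exactly count times. A small userspace sketch of that shape; read_port_byte() is a made-up stand-in for the real port accessor.

/* Sketch of the insb()-style copy loop. */
#include <stdint.h>
#include <stdio.h>

static uint8_t read_port_byte(void)
{
    static uint8_t next;
    return next++;              /* fake data source for the example */
}

static void toy_insb(void *buffer, int count)
{
    uint8_t *buf = buffer;

    if (count) {
        do {
            *buf++ = read_port_byte();
        } while (--count);      /* mirrors "} while (--count);" above */
    }
}

int main(void)
{
    uint8_t buf[4] = { 0 };

    toy_insb(buf, 4);
    printf("%u %u %u %u\n", buf[0], buf[1], buf[2], buf[3]);
    return 0;
}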
D | iomap.h |
    51  extern void ioread8_rep(void __iomem *port, void *buf, unsigned long count);
    52  extern void ioread16_rep(void __iomem *port, void *buf, unsigned long count);
    53  extern void ioread32_rep(void __iomem *port, void *buf, unsigned long count);
    55  extern void iowrite8_rep(void __iomem *port, const void *buf, unsigned long count);
    56  extern void iowrite16_rep(void __iomem *port, const void *buf, unsigned long count);
    57  extern void iowrite32_rep(void __iomem *port, const void *buf, unsigned long count);
|
/include/linux/ |
D | vt_buffer.h |
    31  static inline void scr_memsetw(u16 *s, u16 c, unsigned int count)  in scr_memsetw()  argument
    33  count /= 2;  in scr_memsetw()
    34  while (count--)  in scr_memsetw()
    40  static inline void scr_memcpyw(u16 *d, const u16 *s, unsigned int count)  in scr_memcpyw()  argument
    42  count /= 2;  in scr_memcpyw()
    43  while (count--)  in scr_memcpyw()
    49  static inline void scr_memmovew(u16 *d, const u16 *s, unsigned int count)  in scr_memmovew()  argument
    52  scr_memcpyw(d, s, count);  in scr_memmovew()
    54  count /= 2;  in scr_memmovew()
    55  d += count;  in scr_memmovew()
    [all …]
|
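In the scr_mem*w() helpers above, count arrives in bytes and is halved because the screen buffer is addressed as 16-bit cells. A minimal sketch of the scr_memsetw() pattern; toy_memsetw() is an invented userspace stand-in (the kernel writes through scr_writew()).

/* Sketch of the scr_memsetw() pattern: bytes in, 16-bit cells out. */
#include <stdint.h>
#include <stdio.h>

static void toy_memsetw(uint16_t *s, uint16_t c, unsigned int count)
{
    count /= 2;                 /* bytes -> 16-bit cells */
    while (count--)
        *s++ = c;
}

int main(void)
{
    uint16_t line[4];

    toy_memsetw(line, 0x0720, sizeof(line));   /* blank cells; size in bytes */
    printf("0x%04x 0x%04x\n", line[0], line[3]);
    return 0;
}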
D | regset.h |
    60   unsigned int pos, unsigned int count,
    81   unsigned int pos, unsigned int count,
    220  static inline int user_regset_copyout(unsigned int *pos, unsigned int *count,  in user_regset_copyout()  argument
    225  if (*count == 0)  in user_regset_copyout()
    229  unsigned int copy = (end_pos < 0 ? *count  in user_regset_copyout()
    230  : min(*count, end_pos - *pos));  in user_regset_copyout()
    240  *count -= copy;  in user_regset_copyout()
    245  static inline int user_regset_copyin(unsigned int *pos, unsigned int *count,  in user_regset_copyin()  argument
    250  if (*count == 0)  in user_regset_copyin()
    254  unsigned int copy = (end_pos < 0 ? *count  in user_regset_copyin()
    [all …]
|
D | uio.h |
    31   size_t count;  member
    60   .iov_len = min(iter->count,  in iov_iter_iovec()
    68   (iter).count && \
    88   unsigned long nr_segs, size_t count);
    97   return i->count;  in iov_iter_count()
    106  static inline void iov_iter_truncate(struct iov_iter *i, u64 count)  in iov_iter_truncate()  argument
    114  if (i->count > count)  in iov_iter_truncate()
    115  i->count = count;  in iov_iter_truncate()
    122  static inline void iov_iter_reexpand(struct iov_iter *i, size_t count)  in iov_iter_reexpand()  argument
    124  i->count = count;  in iov_iter_reexpand()
|
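The uio.h fragments show the truncate/reexpand pairing: iov_iter_truncate() only ever shrinks count, and iov_iter_reexpand() restores a size the caller remembered earlier. A cut-down sketch under that reading, with a toy struct that keeps only the count field.

/* Sketch of the iov_iter_truncate()/iov_iter_reexpand() protocol. */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct toy_iter { size_t count; };

static void toy_truncate(struct toy_iter *i, uint64_t count)
{
    if (i->count > count)       /* truncate never grows the iterator */
        i->count = count;
}

static void toy_reexpand(struct toy_iter *i, size_t count)
{
    i->count = count;           /* caller restores the remembered size */
}

int main(void)
{
    struct toy_iter it = { .count = 4096 };
    size_t old = it.count;

    toy_truncate(&it, 512);     /* e.g. clamp to a block boundary */
    printf("truncated: %zu\n", it.count);
    toy_reexpand(&it, old);
    printf("reexpanded: %zu\n", it.count);
    return 0;
}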
D | dynamic_queue_limits.h |
    72  static inline void dql_queued(struct dql *dql, unsigned int count)  in dql_queued()  argument
    74  BUG_ON(count > DQL_MAX_OBJECT);  in dql_queued()
    76  dql->last_obj_cnt = count;  in dql_queued()
    85  dql->num_queued += count;  in dql_queued()
    95  void dql_completed(struct dql *dql, unsigned int count);
|
D | percpu_counter.h |
    21   s64 count;  member
    64   return fbc->count;  in percpu_counter_read()
    74   s64 ret = fbc->count;  in percpu_counter_read_positive()
    90   s64 count;  member
    96   fbc->count = amount;  in percpu_counter_init()
    106  fbc->count = amount;  in percpu_counter_set()
    111  if (fbc->count > rhs)  in percpu_counter_compare()
    113  else if (fbc->count < rhs)  in percpu_counter_compare()
    123  fbc->count += amount;  in percpu_counter_add()
    135  return fbc->count;  in percpu_counter_read()
    [all …]
|
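What is visible above is largely the !SMP fallback of percpu_counter: a single s64 count that is read, set, added to, and compared directly, with no per-CPU batching. A small sketch of those semantics using invented toy_counter_* names.

/* Sketch of the non-SMP percpu_counter fallback semantics. */
#include <stdint.h>
#include <stdio.h>

struct toy_counter { int64_t count; };

static void toy_counter_add(struct toy_counter *fbc, int64_t amount)
{
    fbc->count += amount;       /* mirrors percpu_counter_add() above */
}

static int toy_counter_compare(struct toy_counter *fbc, int64_t rhs)
{
    if (fbc->count > rhs)
        return 1;
    else if (fbc->count < rhs)
        return -1;
    return 0;
}

int main(void)
{
    struct toy_counter c = { .count = 0 };

    toy_counter_add(&c, 10);
    toy_counter_add(&c, -3);
    printf("count=%lld compare(5)=%d\n", (long long)c.count,
           toy_counter_compare(&c, 5));
    return 0;
}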
D | percpu-refcount.h |
    84   atomic_long_t count;  member
    166  atomic_long_inc(&ref->count);  in percpu_ref_get()
    191  ret = atomic_long_inc_not_zero(&ref->count);  in percpu_ref_tryget()
    225  ret = atomic_long_inc_not_zero(&ref->count);  in percpu_ref_tryget_live()
    250  else if (unlikely(atomic_long_dec_and_test(&ref->count)))  in percpu_ref_put()
    270  return !atomic_long_read(&ref->count);  in percpu_ref_is_zero()
|
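The percpu-refcount.h lines show the atomic fallback path: get increments count, tryget refuses to resurrect a zero count, and put releases the object when the count drops to zero. A userspace sketch of that protocol with C11 atomics; toy_ref and its release callback are illustrative only.

/* Sketch of the get/tryget/put protocol on a plain atomic count. */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

struct toy_ref {
    atomic_long count;
    void (*release)(struct toy_ref *ref);
};

static void toy_ref_get(struct toy_ref *ref)
{
    atomic_fetch_add(&ref->count, 1);
}

static bool toy_ref_tryget(struct toy_ref *ref)
{
    long v = atomic_load(&ref->count);

    /* mirrors atomic_long_inc_not_zero(): only bump a non-zero count */
    while (v != 0)
        if (atomic_compare_exchange_weak(&ref->count, &v, v + 1))
            return true;
    return false;
}

static void say_released(struct toy_ref *ref)
{
    (void)ref;
    puts("released");
}

static void toy_ref_put(struct toy_ref *ref)
{
    if (atomic_fetch_sub(&ref->count, 1) == 1)   /* dropped to zero */
        ref->release(ref);
}

int main(void)
{
    struct toy_ref r = { .count = 1, .release = say_released };

    toy_ref_get(&r);                              /* count: 2 */
    toy_ref_put(&r);                              /* count: 1 */
    toy_ref_put(&r);                              /* count: 0, prints "released" */
    printf("tryget after release: %d\n", toy_ref_tryget(&r));  /* 0 */
    return 0;
}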
D | nsproxy.h |
    30  atomic_t count;  member
    75  if (atomic_dec_and_test(&ns->count)) {  in put_nsproxy()
    82  atomic_inc(&ns->count);  in get_nsproxy()
|
D | seq_file.h |
    22  size_t count;  member
    55  BUG_ON(m->count > m->size);  in seq_get_buf()
    56  if (m->count < m->size)  in seq_get_buf()
    57  *bufp = m->buf + m->count;  in seq_get_buf()
    61  return m->size - m->count;  in seq_get_buf()
    76  m->count = m->size;  in seq_commit()
    78  BUG_ON(m->count + num > m->size);  in seq_commit()
    79  m->count += num;  in seq_commit()
    93  m->pad_until = m->count + size;  in seq_setwidth()
|
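seq_get_buf() and seq_commit() above form a two-step protocol: ask how much room remains after count, write into it directly, then commit how many bytes were used (or -1 to mark the buffer as overflowed, which sets count to size). A minimal sketch of that protocol with an invented toy_seq struct.

/* Sketch of the seq_get_buf()/seq_commit() buffer protocol. */
#include <stddef.h>
#include <stdio.h>

struct toy_seq {
    char   *buf;
    size_t  size;
    size_t  count;
};

static size_t toy_get_buf(struct toy_seq *m, char **bufp)
{
    if (m->count < m->size)
        *bufp = m->buf + m->count;     /* free space starts after count */
    else
        *bufp = NULL;
    return m->size - m->count;
}

static void toy_commit(struct toy_seq *m, int num)
{
    if (num < 0)
        m->count = m->size;            /* mark the buffer as overflowed */
    else
        m->count += num;
}

int main(void)
{
    char storage[32];
    struct toy_seq m = { .buf = storage, .size = sizeof(storage), .count = 0 };
    char *p;
    size_t room = toy_get_buf(&m, &p);
    int n = snprintf(p, room, "hello %d", 42);

    toy_commit(&m, (size_t)n < room ? n : -1);
    printf("used %zu of %zu bytes\n", m.count, m.size);
    return 0;
}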
D | lockref.h |
    31  int count;  member
    48  return ((int)l->count < 0);  in __lockref_is_dead()
|
D | dma-contiguous.h |
    114  struct page *dma_alloc_from_contiguous(struct device *dev, size_t count,
    117  int count);
    147  struct page *dma_alloc_from_contiguous(struct device *dev, size_t count,  in dma_alloc_from_contiguous()  argument
    155  int count)  in dma_release_from_contiguous()  argument
|
D | user_namespace.h |
    16  u32 count;  member
    28  atomic_t count;  member
    50  atomic_inc(&ns->count);  in get_user_ns()
    60  if (ns && atomic_dec_and_test(&ns->count))  in put_user_ns()
|
/include/trace/events/ |
D | regmap.h |
    68   TP_PROTO(struct regmap *map, unsigned int reg, int count),
    70   TP_ARGS(map, reg, count),
    75   __field( int, count )
    81   __entry->count = count;
    86   (int)__entry->count)
    91   TP_PROTO(struct regmap *map, unsigned int reg, int count),
    93   TP_ARGS(map, reg, count)
    98   TP_PROTO(struct regmap *map, unsigned int reg, int count),
    100  TP_ARGS(map, reg, count)
    105  TP_PROTO(struct regmap *map, unsigned int reg, int count),
    [all …]
|
/include/xen/ |
D | grant_table.h |
    58   u16 count;  member
    94   int gnttab_alloc_grant_references(u16 count, grant_ref_t *pprivate_head);
    108  void (*fn)(void *), void *arg, u16 count);
    156  unsigned int count;  member
    168  struct page **pages, unsigned int count);
    171  struct page **pages, unsigned int count);
    182  void gnttab_batch_map(struct gnttab_map_grant_ref *batch, unsigned count);
    183  void gnttab_batch_copy(struct gnttab_copy *batch, unsigned count);
|
/include/uapi/xen/ |
D | gntdev.h |
    53   uint32_t count;  member
    75   uint32_t count;  member
    101  uint32_t count;  member
    116  uint32_t count;  member
|
/include/sound/ |
D | rawmidi.h |
    161  const unsigned char *buffer, int count);
    164  unsigned char *buffer, int count);
    165  int snd_rawmidi_transmit_ack(struct snd_rawmidi_substream *substream, int count);
    167  unsigned char *buffer, int count);
    169  unsigned char *buffer, int count);
    171  int count);
    187  unsigned char *buf, long count);
    189  const unsigned char *buf, long count);
|
/include/uapi/linux/netfilter/ |
D | xt_multiport.h |
    17  __u8 count; /* Number of ports */  member
    23  __u8 count; /* Number of ports */  member
|
/include/net/netfilter/ |
D | nf_conntrack_tuple.h |
    159  int count;  in nf_ct_tuple_src_mask_cmp()  local
    161  for (count = 0; count < NF_CT_TUPLE_L3SIZE; count++) {  in nf_ct_tuple_src_mask_cmp()
    162  if ((t1->src.u3.all[count] ^ t2->src.u3.all[count]) &  in nf_ct_tuple_src_mask_cmp()
    163  mask->src.u3.all[count])  in nf_ct_tuple_src_mask_cmp()
|
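nf_ct_tuple_src_mask_cmp() above walks the source-address words and treats two tuples as matching when (t1 ^ t2) & mask is zero for every word. A standalone sketch of that masked comparison; TOY_L3SIZE and the toy_* names are stand-ins for NF_CT_TUPLE_L3SIZE and the real tuple types.

/* Sketch of the masked address comparison: addresses match when they
 * only differ in bits the mask clears. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define TOY_L3SIZE 4   /* stand-in for NF_CT_TUPLE_L3SIZE */

static bool toy_src_mask_cmp(const uint32_t a[TOY_L3SIZE],
                             const uint32_t b[TOY_L3SIZE],
                             const uint32_t mask[TOY_L3SIZE])
{
    int count;

    for (count = 0; count < TOY_L3SIZE; count++) {
        if ((a[count] ^ b[count]) & mask[count])
            return false;      /* a masked-in bit differs */
    }
    return true;
}

int main(void)
{
    uint32_t a[TOY_L3SIZE]    = { 0xc0a80001, 0, 0, 0 };   /* 192.168.0.1 */
    uint32_t b[TOY_L3SIZE]    = { 0xc0a800ff, 0, 0, 0 };   /* 192.168.0.255 */
    uint32_t mask[TOY_L3SIZE] = { 0xffffff00, 0, 0, 0 };   /* /24 */

    printf("match: %d\n", toy_src_mask_cmp(a, b, mask));   /* 1 */
    return 0;
}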
/include/crypto/ |
D | sha.h |
    68  u64 count;  member
    74  u64 count;  member
    80  u64 count[2];  member
|