/include/asm-generic/ |
D | mutex-xchg.h |
    26   __mutex_fastpath_lock(atomic_t *count, void (*fail_fn)(atomic_t *))  in __mutex_fastpath_lock() argument
    28   if (unlikely(atomic_xchg(count, 0) != 1))  in __mutex_fastpath_lock()
    34   if (likely(atomic_xchg(count, -1) != 1))  in __mutex_fastpath_lock()
    35   fail_fn(count);  in __mutex_fastpath_lock()
    49   __mutex_fastpath_lock_retval(atomic_t *count, int (*fail_fn)(atomic_t *))  in __mutex_fastpath_lock_retval() argument
    51   if (unlikely(atomic_xchg(count, 0) != 1))  in __mutex_fastpath_lock_retval()
    52   if (likely(atomic_xchg(count, -1) != 1))  in __mutex_fastpath_lock_retval()
    53   return fail_fn(count);  in __mutex_fastpath_lock_retval()
    70   __mutex_fastpath_unlock(atomic_t *count, void (*fail_fn)(atomic_t *))  in __mutex_fastpath_unlock() argument
    72   if (unlikely(atomic_xchg(count, 1) != 0))  in __mutex_fastpath_unlock()
    [all …]
|
D | mutex-dec.h |
    21   __mutex_fastpath_lock(atomic_t *count, void (*fail_fn)(atomic_t *))  in __mutex_fastpath_lock() argument
    23   if (unlikely(atomic_dec_return(count) < 0))  in __mutex_fastpath_lock()
    24   fail_fn(count);  in __mutex_fastpath_lock()
    38   __mutex_fastpath_lock_retval(atomic_t *count, int (*fail_fn)(atomic_t *))  in __mutex_fastpath_lock_retval() argument
    40   if (unlikely(atomic_dec_return(count) < 0))  in __mutex_fastpath_lock_retval()
    41   return fail_fn(count);  in __mutex_fastpath_lock_retval()
    59   __mutex_fastpath_unlock(atomic_t *count, void (*fail_fn)(atomic_t *))  in __mutex_fastpath_unlock() argument
    61   if (unlikely(atomic_inc_return(count) <= 0))  in __mutex_fastpath_unlock()
    62   fail_fn(count);  in __mutex_fastpath_unlock()
    83   __mutex_fastpath_trylock(atomic_t *count, int (*fail_fn)(atomic_t *))  in __mutex_fastpath_trylock() argument
    [all …]
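Both generic fastpath flavours above rely on the same convention for mutex->count: 1 means unlocked, 0 means locked, and a negative value means locked with possible waiters, with fail_fn() being the out-of-line slow path. Below is a minimal user-space sketch of the xchg-based variant only, using C11 atomics in place of atomic_t; the demo_* names are hypothetical and the slow paths are reduced to stubs.

    #include <stdatomic.h>
    #include <stdio.h>

    /* 1 = unlocked, 0 = locked, -1 = locked and waiters may exist */
    static atomic_int count = 1;

    static void slowpath_lock(atomic_int *cnt)   { (void)cnt; /* would block */ }
    static void slowpath_unlock(atomic_int *cnt) { (void)cnt; /* would wake a waiter */ }

    static void demo_mutex_lock(void)
    {
        /* fastpath: swap in 0; an old value other than 1 means contention */
        if (atomic_exchange(&count, 0) != 1) {
            /* record possible waiters; if it became 1 meanwhile we got it */
            if (atomic_exchange(&count, -1) != 1)
                slowpath_lock(&count);
        }
    }

    static void demo_mutex_unlock(void)
    {
        /* fastpath: swap in 1; a non-zero old value means waiters may exist */
        if (atomic_exchange(&count, 1) != 0)
            slowpath_unlock(&count);
    }

    int main(void)
    {
        demo_mutex_lock();
        demo_mutex_unlock();
        printf("count = %d\n", atomic_load(&count));  /* 1: unlocked again */
        return 0;
    }

The dec-based variant in mutex-dec.h encodes the same states with atomic_dec_return()/atomic_inc_return() instead of exchanges, which suits architectures with a cheap atomic decrement-and-test.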
|
D | mutex-null.h |
    13   #define __mutex_fastpath_lock(count, fail_fn) fail_fn(count)  argument
    14   #define __mutex_fastpath_lock_retval(count, fail_fn) fail_fn(count)  argument
    15   #define __mutex_fastpath_unlock(count, fail_fn) fail_fn(count)  argument
    16   #define __mutex_fastpath_trylock(count, fail_fn) fail_fn(count)  argument
|
D | ide_iops.h |
    8    static __inline__ void __ide_mm_insw(void __iomem *port, void *addr, u32 count)  in __ide_mm_insw() argument
    10   while (count--) {  in __ide_mm_insw()
    16   static __inline__ void __ide_mm_insl(void __iomem *port, void *addr, u32 count)  in __ide_mm_insl() argument
    18   while (count--) {  in __ide_mm_insl()
    24   static __inline__ void __ide_mm_outsw(void __iomem *port, void *addr, u32 count)  in __ide_mm_outsw() argument
    26   while (count--) {  in __ide_mm_outsw()
    32   static __inline__ void __ide_mm_outsl(void __iomem * port, void *addr, u32 count)  in __ide_mm_outsl() argument
    34   while (count--) {  in __ide_mm_outsl()
|
D | rwsem.h |
    36    if (unlikely(atomic_long_inc_return((atomic_long_t *)&sem->count) <= 0))  in __down_read()
    44    while ((tmp = sem->count) >= 0) {  in __down_read_trylock()
    45    if (tmp == cmpxchg(&sem->count, tmp,  in __down_read_trylock()
    61    (atomic_long_t *)&sem->count);  in __down_write_nested()
    75    tmp = cmpxchg(&sem->count, RWSEM_UNLOCKED_VALUE,  in __down_write_trylock()
    87    tmp = atomic_long_dec_return((atomic_long_t *)&sem->count);  in __up_read()
    98    (atomic_long_t *)&sem->count) < 0))  in __up_write()
    107   atomic_long_add(delta, (atomic_long_t *)&sem->count);  in rwsem_atomic_add()
    118   (atomic_long_t *)&sem->count);  in __downgrade_write()
    128   return atomic_long_add_return(delta, (atomic_long_t *)&sem->count);  in rwsem_atomic_update()
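In the generic rwsem above, sem->count is a single signed word: zero means unlocked, positive values count active readers, and negative values indicate writer activity (the real header adds read/write bias constants and waiter accounting). Below is a deliberately simplified user-space sketch of just the trylock/release paths, ignoring waiters entirely; the demo_* names are hypothetical.

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    /* 0 = unlocked, >0 = that many readers, -1 = write-locked (simplified) */
    static atomic_long count = 0;

    static bool demo_down_read_trylock(void)
    {
        long tmp;

        /* like __down_read_trylock(): retry the cmpxchg while no writer holds it */
        while ((tmp = atomic_load(&count)) >= 0) {
            if (atomic_compare_exchange_weak(&count, &tmp, tmp + 1))
                return true;
        }
        return false;
    }

    static bool demo_down_write_trylock(void)
    {
        long unlocked = 0;   /* plays the role of RWSEM_UNLOCKED_VALUE */

        return atomic_compare_exchange_strong(&count, &unlocked, -1);
    }

    static void demo_up_read(void)  { atomic_fetch_sub(&count, 1); }
    static void demo_up_write(void) { atomic_store(&count, 0); }

    int main(void)
    {
        if (demo_down_read_trylock())
            demo_up_read();
        if (demo_down_write_trylock())
            demo_up_write();
        printf("count = %ld\n", atomic_load(&count));  /* 0 */
        return 0;
    }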
|
D | io.h |
    146   static inline void insb(unsigned long addr, void *buffer, int count)  in insb() argument
    148   if (count) {  in insb()
    153   } while (--count);  in insb()
    159   static inline void insw(unsigned long addr, void *buffer, int count)  in insw() argument
    161   if (count) {  in insw()
    166   } while (--count);  in insw()
    172   static inline void insl(unsigned long addr, void *buffer, int count)  in insl() argument
    174   if (count) {  in insl()
    179   } while (--count);  in insl()
    185   static inline void outsb(unsigned long addr, const void *buffer, int count)  in outsb() argument
    [all …]
|
D | iomap.h |
    51   extern void ioread8_rep(void __iomem *port, void *buf, unsigned long count);
    52   extern void ioread16_rep(void __iomem *port, void *buf, unsigned long count);
    53   extern void ioread32_rep(void __iomem *port, void *buf, unsigned long count);
    55   extern void iowrite8_rep(void __iomem *port, const void *buf, unsigned long count);
    56   extern void iowrite16_rep(void __iomem *port, const void *buf, unsigned long count);
    57   extern void iowrite32_rep(void __iomem *port, const void *buf, unsigned long count);
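Both the insb()/insw()/insl() loops in io.h and the ioread*_rep()/iowrite*_rep() prototypes above move count fixed-width items between a single device register and a memory buffer; only the buffer pointer advances, the port address does not. Here is a user-space model of the 16-bit read direction; demo_ins16 and fake_port are hypothetical stand-ins for the real accessors and hardware.

    #include <stdint.h>
    #include <stdio.h>

    /* Read 'count' 16-bit values from the same "register" into buf. */
    static void demo_ins16(const volatile uint16_t *port, uint16_t *buf,
                           unsigned long count)
    {
        while (count--)
            *buf++ = *port;   /* the kernel would use a readw()/inw() accessor here */
    }

    int main(void)
    {
        volatile uint16_t fake_port = 0xBEEF;   /* simulated device FIFO register */
        uint16_t buf[4];

        demo_ins16(&fake_port, buf, 4);
        printf("%#x %#x\n", buf[0], buf[3]);    /* both 0xbeef */
        return 0;
    }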
|
/include/linux/ |
D | vt_buffer.h |
    31   static inline void scr_memsetw(u16 *s, u16 c, unsigned int count)  in scr_memsetw() argument
    33   count /= 2;  in scr_memsetw()
    34   while (count--)  in scr_memsetw()
    40   static inline void scr_memcpyw(u16 *d, const u16 *s, unsigned int count)  in scr_memcpyw() argument
    42   count /= 2;  in scr_memcpyw()
    43   while (count--)  in scr_memcpyw()
    49   static inline void scr_memmovew(u16 *d, const u16 *s, unsigned int count)  in scr_memmovew() argument
    52   scr_memcpyw(d, s, count);  in scr_memmovew()
    54   count /= 2;  in scr_memmovew()
    55   d += count;  in scr_memmovew()
    [all …]
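Note the convention in the scr_*() helpers above: count is passed in bytes, so each helper first halves it (count /= 2) and then loops over 16-bit screen cells. A small user-space model of scr_memsetw() with the same bytes-in, cells-written convention; demo_scr_memsetw is a hypothetical name.

    #include <stdint.h>
    #include <stdio.h>

    /* Fill a text-mode screen buffer with one character+attribute cell.
     * 'count' is in bytes, as in the kernel helper, not in u16 cells. */
    static void demo_scr_memsetw(uint16_t *s, uint16_t c, unsigned int count)
    {
        count /= 2;            /* bytes -> 16-bit cells */
        while (count--)
            *s++ = c;          /* the kernel may route this through scr_writew() */
    }

    int main(void)
    {
        uint16_t line[80];

        demo_scr_memsetw(line, 0x0720, sizeof(line));  /* space, grey on black */
        printf("%#x\n", line[79]);                     /* 0x720 */
        return 0;
    }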
|
D | regset.h |
    60    unsigned int pos, unsigned int count,
    81    unsigned int pos, unsigned int count,
    220   static inline int user_regset_copyout(unsigned int *pos, unsigned int *count,  in user_regset_copyout() argument
    225   if (*count == 0)  in user_regset_copyout()
    229   unsigned int copy = (end_pos < 0 ? *count  in user_regset_copyout()
    230   : min(*count, end_pos - *pos));  in user_regset_copyout()
    240   *count -= copy;  in user_regset_copyout()
    245   static inline int user_regset_copyin(unsigned int *pos, unsigned int *count,  in user_regset_copyin() argument
    250   if (*count == 0)  in user_regset_copyin()
    254   unsigned int copy = (end_pos < 0 ? *count  in user_regset_copyin()
    [all …]
|
D | dynamic_queue_limits.h |
    72   static inline void dql_queued(struct dql *dql, unsigned int count)  in dql_queued() argument
    74   BUG_ON(count > DQL_MAX_OBJECT);  in dql_queued()
    76   dql->num_queued += count;  in dql_queued()
    77   dql->last_obj_cnt = count;  in dql_queued()
    87   void dql_completed(struct dql *dql, unsigned int count);
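dql_queued()/dql_completed() above are the two halves of dynamic queue limit accounting: a driver reports count objects when it hands them to the hardware and again when the hardware completes them, and the difference against a limit decides when to stop and wake the queue. The sketch below models only that bookkeeping with a fixed limit; the adaptive limit recalculation, which is the interesting part of the real dql_completed(), is left out, and all demo_* names are hypothetical.

    #include <stdio.h>

    struct demo_dql {
        unsigned int num_queued;     /* objects handed to the device */
        unsigned int num_completed;  /* objects the device has finished */
        unsigned int limit;          /* fixed here; recomputed by the real code */
    };

    static void demo_dql_queued(struct demo_dql *dql, unsigned int count)
    {
        dql->num_queued += count;
    }

    static void demo_dql_completed(struct demo_dql *dql, unsigned int count)
    {
        dql->num_completed += count;
    }

    /* Roughly the question dql_avail() answers: how much more may be queued? */
    static int demo_dql_avail(const struct demo_dql *dql)
    {
        return (int)(dql->limit - (dql->num_queued - dql->num_completed));
    }

    int main(void)
    {
        struct demo_dql dql = { .limit = 1000 };

        demo_dql_queued(&dql, 600);
        demo_dql_completed(&dql, 200);
        printf("avail = %d\n", demo_dql_avail(&dql));  /* 600 */
        return 0;
    }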
|
D | percpu_counter.h |
    20    s64 count;  member
    63    return fbc->count;  in percpu_counter_read()
    73    s64 ret = fbc->count;  in percpu_counter_read_positive()
    89    s64 count;  member
    94    fbc->count = amount;  in percpu_counter_init()
    104   fbc->count = amount;  in percpu_counter_set()
    109   if (fbc->count > rhs)  in percpu_counter_compare()
    111   else if (fbc->count < rhs)  in percpu_counter_compare()
    121   fbc->count += amount;  in percpu_counter_add()
    133   return fbc->count;  in percpu_counter_read()
    [all …]
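The two halves of percpu_counter.h above are the SMP and UP variants: on UP the structure is just a plain s64 count, while on SMP (mostly outside this excerpt) percpu_counter_add() accumulates into a per-CPU delta and folds it into fbc->count only once it exceeds a batch, so percpu_counter_read() is cheap but approximate. A single-threaded user-space sketch of that batching idea; the pcpu array, BATCH value and demo_* names are hypothetical.

    #include <stdio.h>

    #define NR_CPUS 4
    #define BATCH   32

    struct demo_percpu_counter {
        long long count;        /* global, approximate total */
        int pcpu[NR_CPUS];      /* per-CPU deltas not yet folded in */
    };

    static void demo_counter_add(struct demo_percpu_counter *fbc,
                                 int cpu, long long amount)
    {
        long long local = fbc->pcpu[cpu] + amount;

        if (local >= BATCH || local <= -BATCH) {
            fbc->count += local;    /* fold the whole batch into the global count */
            fbc->pcpu[cpu] = 0;
        } else {
            fbc->pcpu[cpu] = (int)local;
        }
    }

    /* Cheap read: may lag the true value by up to NR_CPUS * BATCH. */
    static long long demo_counter_read(const struct demo_percpu_counter *fbc)
    {
        return fbc->count;
    }

    int main(void)
    {
        struct demo_percpu_counter fbc = { 0 };

        for (int i = 0; i < 200; i++)
            demo_counter_add(&fbc, i % NR_CPUS, 1);
        printf("approx %lld\n", demo_counter_read(&fbc));  /* 128; 72 still per-CPU */
        return 0;
    }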
|
D | nsproxy.h |
    26   atomic_t count;  member
    75   if (atomic_dec_and_test(&ns->count)) {  in put_nsproxy()
    82   atomic_inc(&ns->count);  in get_nsproxy()
|
D | seq_file.h |
    21   size_t count;  member
    50   BUG_ON(m->count > m->size);  in seq_get_buf()
    51   if (m->count < m->size)  in seq_get_buf()
    52   *bufp = m->buf + m->count;  in seq_get_buf()
    56   return m->size - m->count;  in seq_get_buf()
    71   m->count = m->size;  in seq_commit()
    73   BUG_ON(m->count + num > m->size);  in seq_commit()
    74   m->count += num;  in seq_commit()
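seq_get_buf()/seq_commit() above hand the unused tail of the seq_file buffer straight to a caller: m->size - m->count bytes are available, and seq_commit() either advances count by the bytes actually written or, when passed a negative num, saturates count to size so the core sees the overflow and retries with a larger buffer. A user-space model of that pair; struct demo_seq and the demo_* helpers are hypothetical.

    #include <assert.h>
    #include <stdio.h>

    struct demo_seq {
        char   *buf;
        size_t  size;    /* total buffer size */
        size_t  count;   /* bytes already used */
    };

    /* Hand out the unused tail of the buffer; returns how many bytes fit. */
    static size_t demo_seq_get_buf(struct demo_seq *m, char **bufp)
    {
        assert(m->count <= m->size);
        *bufp = (m->count < m->size) ? m->buf + m->count : NULL;
        return m->size - m->count;
    }

    /* num >= 0: that many bytes were written; num < 0: mark the buffer full. */
    static void demo_seq_commit(struct demo_seq *m, int num)
    {
        if (num < 0) {
            m->count = m->size;          /* overflow: caller retries with more room */
        } else {
            assert(m->count + (size_t)num <= m->size);
            m->count += (size_t)num;
        }
    }

    int main(void)
    {
        char storage[64];
        struct demo_seq m = { .buf = storage, .size = sizeof(storage) };
        char *p;
        size_t avail = demo_seq_get_buf(&m, &p);
        int n = snprintf(p, avail, "hello %d\n", 42);

        demo_seq_commit(&m, (n < 0 || (size_t)n >= avail) ? -1 : n);
        printf("%zu bytes used\n", m.count);   /* 9 */
        return 0;
    }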
|
D | bitops.h |
    46   static __inline__ int get_bitmask_order(unsigned int count)  in get_bitmask_order() argument
    50   order = fls(count);  in get_bitmask_order()
    54   static __inline__ int get_count_order(unsigned int count)  in get_count_order() argument
    58   order = fls(count) - 1;  in get_count_order()
    59   if (count & (count - 1))  in get_count_order()
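get_count_order() above computes the base-2 order needed to hold count objects: fls(count) - 1, bumped by one when count is not a power of two (the increment itself falls outside the excerpt). A user-space check of that rounding rule, substituting the GCC/Clang __builtin_clz() for the kernel's fls(); the demo_* names are hypothetical.

    #include <stdio.h>

    /* fls() as in the kernel: 1-based index of the highest set bit, fls(0) == 0. */
    static int demo_fls(unsigned int x)
    {
        return x ? 32 - __builtin_clz(x) : 0;
    }

    /* Smallest order such that (1U << order) >= count, for count >= 1. */
    static int demo_get_count_order(unsigned int count)
    {
        int order = demo_fls(count) - 1;

        if (count & (count - 1))    /* not a power of two: round up */
            order++;
        return order;
    }

    int main(void)
    {
        printf("%d %d %d\n",
               demo_get_count_order(1),    /* 0 */
               demo_get_count_order(4),    /* 2 */
               demo_get_count_order(5));   /* 3 */
        return 0;
    }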
|
D | posix_acl_xattr.h |
    38   posix_acl_xattr_size(int count)  in posix_acl_xattr_size() argument
    41   (count * sizeof(posix_acl_xattr_entry)));  in posix_acl_xattr_size()
|
/include/net/netfilter/ |
D | xt_log.h |
    4    unsigned int count;  member
    14   if (likely(m->count < S_SIZE)) {  in sb_add()
    16   len = vsnprintf(m->buf + m->count, S_SIZE - m->count, f, args);  in sb_add()
    18   if (likely(m->count + len < S_SIZE)) {  in sb_add()
    19   m->count += len;  in sb_add()
    23   m->count = S_SIZE;  in sb_add()
    38   m->count = 0;  in sb_open()
    44   m->buf[m->count] = 0;  in sb_close()
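sb_add() above appends formatted text into a fixed S_SIZE buffer, tracking the used length in m->count and pinning count to S_SIZE once vsnprintf() would overflow, so later calls become no-ops and sb_close() can safely terminate the string at buf[count]. A user-space model of that bounded-append pattern; the demo_sb_* names and the 256-byte size are hypothetical.

    #include <stdarg.h>
    #include <stdio.h>

    #define S_SIZE 256

    struct demo_sbuff {
        unsigned int count;          /* bytes used; S_SIZE means "full, stop" */
        char         buf[S_SIZE + 1];
    };

    static int demo_sb_add(struct demo_sbuff *m, const char *f, ...)
    {
        va_list args;
        int len;

        if (m->count >= S_SIZE)      /* an earlier call already overflowed */
            return 0;

        va_start(args, f);
        len = vsnprintf(m->buf + m->count, S_SIZE - m->count, f, args);
        va_end(args);

        if (len >= 0 && m->count + (unsigned int)len < S_SIZE)
            m->count += (unsigned int)len;   /* it fit: advance */
        else
            m->count = S_SIZE;               /* overflow: saturate */

        return len;
    }

    int main(void)
    {
        struct demo_sbuff m = { .count = 0 };

        demo_sb_add(&m, "SRC=%s ", "192.0.2.1");
        demo_sb_add(&m, "DPT=%u ", 443u);
        m.buf[m.count] = '\0';               /* what sb_close() does */
        printf("%s\n", m.buf);
        return 0;
    }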
|
D | nf_conntrack_tuple.h |
    159   int count;  in nf_ct_tuple_src_mask_cmp() local
    161   for (count = 0; count < NF_CT_TUPLE_L3SIZE; count++) {  in nf_ct_tuple_src_mask_cmp()
    162   if ((t1->src.u3.all[count] ^ t2->src.u3.all[count]) &  in nf_ct_tuple_src_mask_cmp()
    163   mask->src.u3.all[count])  in nf_ct_tuple_src_mask_cmp()
|
/include/trace/events/ |
D | regmap.h |
    69    TP_PROTO(struct device *dev, unsigned int reg, int count),
    71    TP_ARGS(dev, reg, count),
    76    __field( int, count )
    82    __entry->count = count;
    87    (int)__entry->count)
    92    TP_PROTO(struct device *dev, unsigned int reg, int count),
    94    TP_ARGS(dev, reg, count)
    99    TP_PROTO(struct device *dev, unsigned int reg, int count),
    101   TP_ARGS(dev, reg, count)
    106   TP_PROTO(struct device *dev, unsigned int reg, int count),
    [all …]
|
/include/xen/ |
D | gntdev.h |
    53    uint32_t count;  member
    75    uint32_t count;  member
    101   uint32_t count;  member
    116   uint32_t count;  member
|
D | gntalloc.h |
    26   uint32_t count;  member
    48   uint32_t count;  member
|
/include/linux/netfilter/ |
D | xt_multiport.h |
    17   __u8 count; /* Number of ports */  member
    23   __u8 count; /* Number of ports */  member
|
/include/sound/ |
D | rawmidi.h |
    162   const unsigned char *buffer, int count);
    166   unsigned char *buffer, int count);
    167   int snd_rawmidi_transmit_ack(struct snd_rawmidi_substream *substream, int count);
    169   unsigned char *buffer, int count);
    185   unsigned char *buf, long count);
    187   const unsigned char *buf, long count);
|
D | i2c.h |
    49    int (*sendbytes)(struct snd_i2c_device *device, unsigned char *bytes, int count);
    50    int (*readbytes)(struct snd_i2c_device *device, unsigned char *bytes, int count);
    100   int snd_i2c_sendbytes(struct snd_i2c_device *device, unsigned char *bytes, int count);
    101   int snd_i2c_readbytes(struct snd_i2c_device *device, unsigned char *bytes, int count);
|
/include/crypto/ |
D | sha.h |
    68   u64 count;  member
    74   u64 count;  member
    80   u64 count[2];  member
|
/include/drm/ |
D | savage_drm.h |
    124   unsigned int count;  member
    180   unsigned short count; /* number of consecutive registers */  member
    188   unsigned short count; /* number of vertices */  member
    195   unsigned short count; /* number of indices that follow */  member
|