| /include/asm-generic/bitops/ |
| D | instrumented-non-atomic.h |
      26  ___set_bit(unsigned long nr, volatile unsigned long *addr)
      28          instrument_write(addr + BIT_WORD(nr), sizeof(long));
      29          arch___set_bit(nr, addr);
      42  ___clear_bit(unsigned long nr, volatile unsigned long *addr)
      44          instrument_write(addr + BIT_WORD(nr), sizeof(long));
      45          arch___clear_bit(nr, addr);
      58  ___change_bit(unsigned long nr, volatile unsigned long *addr)
      60          instrument_write(addr + BIT_WORD(nr), sizeof(long));
      61          arch___change_bit(nr, addr);
      64  static __always_inline void __instrument_read_write_bitop(long nr, volatile unsigned long *addr)
      [all …]
|
| D | le.h |
      19  static inline int test_bit_le(int nr, const void *addr)
      21          return test_bit(nr ^ BITOP_LE_SWIZZLE, addr);
      24  static inline void set_bit_le(int nr, void *addr)
      26          set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
      29  static inline void clear_bit_le(int nr, void *addr)
      31          clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
      34  static inline void __set_bit_le(int nr, void *addr)
      36          __set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
      39  static inline void __clear_bit_le(int nr, void *addr)
      41          __clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
      [all …]
|
| D | instrumented-atomic.h |
      26  static __always_inline void set_bit(long nr, volatile unsigned long *addr)
      28          instrument_atomic_write(addr + BIT_WORD(nr), sizeof(long));
      29          arch_set_bit(nr, addr);
      39  static __always_inline void clear_bit(long nr, volatile unsigned long *addr)
      41          instrument_atomic_write(addr + BIT_WORD(nr), sizeof(long));
      42          arch_clear_bit(nr, addr);
      55  static __always_inline void change_bit(long nr, volatile unsigned long *addr)
      57          instrument_atomic_write(addr + BIT_WORD(nr), sizeof(long));
      58          arch_change_bit(nr, addr);
      68  static __always_inline bool test_and_set_bit(long nr, volatile unsigned long *addr)
      [all …]
|
| D | generic-non-atomic.h |
      28  generic___set_bit(unsigned long nr, volatile unsigned long *addr)
      30          unsigned long mask = BIT_MASK(nr);
      31          unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
      37  generic___clear_bit(unsigned long nr, volatile unsigned long *addr)
      39          unsigned long mask = BIT_MASK(nr);
      40          unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
      55  generic___change_bit(unsigned long nr, volatile unsigned long *addr)
      57          unsigned long mask = BIT_MASK(nr);
      58          unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
      73  generic___test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
      [all …]
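
The generic non-atomic helpers above are nothing more than mask-and-word arithmetic on an array of unsigned long. A minimal user-space sketch of the same idea (standard C only; word_of(), mask_of() and my_set_bit() are illustrative names, not kernel API):

    #include <limits.h>     /* CHAR_BIT */
    #include <stdio.h>

    #define BITS_PER_WORD   (sizeof(unsigned long) * CHAR_BIT)

    /* Same split as BIT_WORD()/BIT_MASK(): which word, which bit inside it. */
    static unsigned long word_of(unsigned long nr) { return nr / BITS_PER_WORD; }
    static unsigned long mask_of(unsigned long nr) { return 1UL << (nr % BITS_PER_WORD); }

    static void my_set_bit(unsigned long nr, unsigned long *addr)
    {
            addr[word_of(nr)] |= mask_of(nr);       /* cf. generic___set_bit() */
    }

    static void my_clear_bit(unsigned long nr, unsigned long *addr)
    {
            addr[word_of(nr)] &= ~mask_of(nr);      /* cf. generic___clear_bit() */
    }

    int main(void)
    {
            unsigned long map[2] = { 0, 0 };

            my_set_bit(70, map);                    /* word 1, bit 6 on a 64-bit build */
            printf("map[1] = %#lx\n", map[1]);      /* prints 0x40 on LP64 */
            my_clear_bit(70, map);
            return 0;
    }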
|
| D | atomic.h |
      15  arch_set_bit(unsigned int nr, volatile unsigned long *p)
      17          p += BIT_WORD(nr);
      18          raw_atomic_long_or(BIT_MASK(nr), (atomic_long_t *)p);
      22  arch_clear_bit(unsigned int nr, volatile unsigned long *p)
      24          p += BIT_WORD(nr);
      25          raw_atomic_long_andnot(BIT_MASK(nr), (atomic_long_t *)p);
      29  arch_change_bit(unsigned int nr, volatile unsigned long *p)
      31          p += BIT_WORD(nr);
      32          raw_atomic_long_xor(BIT_MASK(nr), (atomic_long_t *)p);
      36  arch_test_and_set_bit(unsigned int nr, volatile unsigned long *p)
      [all …]
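
arch_set_bit()/arch_clear_bit() above are simply an atomic OR/AND-NOT of BIT_MASK(nr) into the word picked by BIT_WORD(nr), with no ordering guarantees. A rough user-space analogue with C11 atomics, for illustration only (this is not the kernel's raw_atomic_long_*() layer):

    #include <stdatomic.h>

    #define BITS_PER_ULONG  (sizeof(unsigned long) * 8)

    static void my_atomic_set_bit(unsigned long nr, _Atomic unsigned long *map)
    {
            unsigned long mask = 1UL << (nr % BITS_PER_ULONG);      /* BIT_MASK(nr) */

            /* cf. raw_atomic_long_or(BIT_MASK(nr), p) with p = map + BIT_WORD(nr) */
            atomic_fetch_or_explicit(&map[nr / BITS_PER_ULONG], mask,
                                     memory_order_relaxed);
    }

    static void my_atomic_clear_bit(unsigned long nr, _Atomic unsigned long *map)
    {
            unsigned long mask = 1UL << (nr % BITS_PER_ULONG);

            /* cf. raw_atomic_long_andnot(BIT_MASK(nr), p) */
            atomic_fetch_and_explicit(&map[nr / BITS_PER_ULONG], ~mask,
                                      memory_order_relaxed);
    }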
|
| D | instrumented-lock.h |
      23  static inline void clear_bit_unlock(long nr, volatile unsigned long *addr)
      26          instrument_atomic_write(addr + BIT_WORD(nr), sizeof(long));
      27          arch_clear_bit_unlock(nr, addr);
      39  static inline void __clear_bit_unlock(long nr, volatile unsigned long *addr)
      42          instrument_write(addr + BIT_WORD(nr), sizeof(long));
      43          arch___clear_bit_unlock(nr, addr);
      55  static inline bool test_and_set_bit_lock(long nr, volatile unsigned long *addr)
      57          instrument_atomic_read_write(addr + BIT_WORD(nr), sizeof(long));
      58          return arch_test_and_set_bit_lock(nr, addr);
|
| D | lock.h |
      19  arch_test_and_set_bit_lock(unsigned int nr, volatile unsigned long *p)
      22          unsigned long mask = BIT_MASK(nr);
      24          p += BIT_WORD(nr);
      41  arch_clear_bit_unlock(unsigned int nr, volatile unsigned long *p)
      43          p += BIT_WORD(nr);
      44          raw_atomic_long_fetch_andnot_release(BIT_MASK(nr), (atomic_long_t *)p);
      59  arch___clear_bit_unlock(unsigned int nr, volatile unsigned long *p)
      63          p += BIT_WORD(nr);
      65          old &= ~BIT_MASK(nr);
|
| D | ext2-atomic-setbit.h |
       9  #define ext2_set_bit_atomic(l, nr, addr)    test_and_set_bit_le(nr, addr)
      10  #define ext2_clear_bit_atomic(l, nr, addr)  test_and_clear_bit_le(nr, addr)
|
| D | ext2-atomic.h |
       9  #define ext2_set_bit_atomic(lock, nr, addr)     \
      13          ret = __test_and_set_bit_le(nr, addr);  \
      18  #define ext2_clear_bit_atomic(lock, nr, addr)     \
      22          ret = __test_and_clear_bit_le(nr, addr);  \
|
| /include/uapi/asm-generic/ |
| D | ioctl.h |
      69  #define _IOC(dir,type,nr,size) \
      72           ((nr) << _IOC_NRSHIFT) | \
      85  #define _IO(type,nr)             _IOC(_IOC_NONE,(type),(nr),0)
      86  #define _IOR(type,nr,size)       _IOC(_IOC_READ,(type),(nr),(_IOC_TYPECHECK(size)))
      87  #define _IOW(type,nr,size)       _IOC(_IOC_WRITE,(type),(nr),(_IOC_TYPECHECK(size)))
      88  #define _IOWR(type,nr,size)      _IOC(_IOC_READ|_IOC_WRITE,(type),(nr),(_IOC_TYPECHECK(size)))
      89  #define _IOR_BAD(type,nr,size)   _IOC(_IOC_READ,(type),(nr),sizeof(size))
      90  #define _IOW_BAD(type,nr,size)   _IOC(_IOC_WRITE,(type),(nr),sizeof(size))
      91  #define _IOWR_BAD(type,nr,size)  _IOC(_IOC_READ|_IOC_WRITE,(type),(nr),sizeof(size))
      94  #define _IOC_DIR(nr)             (((nr) >> _IOC_DIRSHIFT) & _IOC_DIRMASK)
      [all …]
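
These are the wrappers drivers use to compose ioctl command numbers: dir encodes the data direction, type is the per-driver magic, nr is the command index and size the payload type. A hypothetical example (the 'X' magic, the struct and the command names are made up for illustration):

    #include <linux/ioctl.h>
    #include <linux/types.h>

    struct xdev_config {
            __u32 mode;
            __u32 flags;
    };

    #define XDEV_IOC_MAGIC       'X'

    #define XDEV_IOC_RESET       _IO(XDEV_IOC_MAGIC, 0)                       /* no payload       */
    #define XDEV_IOC_GET_CONFIG  _IOR(XDEV_IOC_MAGIC, 1, struct xdev_config)  /* read from driver */
    #define XDEV_IOC_SET_CONFIG  _IOW(XDEV_IOC_MAGIC, 2, struct xdev_config)  /* write to driver  */

    /* _IOC_DIR(), _IOC_NR() and _IOC_SIZE() decode the pieces back out of the number. */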
|
| /include/linux/ |
| D | bitops.h |
      12  #define BITS_TO_LONGS(nr)   __KERNEL_DIV_ROUND_UP(nr, BITS_PER_TYPE(long))
      13  #define BITS_TO_U64(nr)     __KERNEL_DIV_ROUND_UP(nr, BITS_PER_TYPE(u64))
      14  #define BITS_TO_U32(nr)     __KERNEL_DIV_ROUND_UP(nr, BITS_PER_TYPE(u32))
      15  #define BITS_TO_BYTES(nr)   __KERNEL_DIV_ROUND_UP(nr, BITS_PER_TYPE(char))
      43  #define bitop(op, nr, addr) \
      44          ((__builtin_constant_p(nr) && \
      48          const##op(nr, addr) : op(nr, addr))
      54  #define __set_bit(nr, addr)     bitop(___set_bit, nr, addr)
      55  #define __clear_bit(nr, addr)   bitop(___clear_bit, nr, addr)
      56  #define __change_bit(nr, addr)  bitop(___change_bit, nr, addr)
      [all …]
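
BITS_TO_LONGS() is the usual way to size a bitmap expressed as an array of unsigned long (DECLARE_BITMAP() in linux/types.h wraps exactly this). A brief kernel-style sketch (the busy_ids bitmap and helper functions are illustrative):

    #include <linux/bitops.h>
    #include <linux/types.h>

    /* 200 bits need BITS_TO_LONGS(200) words: 4 on 64-bit, 7 on 32-bit. */
    static unsigned long busy_ids[BITS_TO_LONGS(200)];
    /* DECLARE_BITMAP(busy_ids, 200) expands to the same array declaration. */

    static void mark_busy(unsigned int id)
    {
            __set_bit(id, busy_ids);        /* non-atomic: caller serializes */
    }

    static bool id_is_busy(unsigned int id)
    {
            return test_bit(id, busy_ids);
    }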
|
| D | bits.h |
      10  #define BIT_MASK(nr)      (UL(1) << ((nr) % BITS_PER_LONG))
      11  #define BIT_WORD(nr)      ((nr) / BITS_PER_LONG)
      12  #define BIT_ULL_MASK(nr)  (ULL(1) << ((nr) % BITS_PER_LONG_LONG))
      13  #define BIT_ULL_WORD(nr)  ((nr) / BITS_PER_LONG_LONG)
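
BIT_MASK()/BIT_WORD() split a bit number into a per-word mask and a word index, so nr may exceed BITS_PER_LONG when indexing a multiword bitmap (unlike BIT(), which must stay within one word). A small check of what that means on a 64-bit build (assumes static_assert from linux/build_bug.h):

    #include <linux/bits.h>
    #include <linux/build_bug.h>

    #if BITS_PER_LONG == 64
    /* Bit 70 of a bitmap lives in word 1, at position 6 within that word. */
    static_assert(BIT_WORD(70) == 1);
    static_assert(BIT_MASK(70) == 0x40UL);
    #endif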
|
| D | page_ref.h |
     118  static inline void page_ref_add(struct page *page, int nr)
     120          atomic_add(nr, &page->_refcount);
     122          __page_ref_mod(page, nr);
     125  static inline void folio_ref_add(struct folio *folio, int nr)
     127          page_ref_add(&folio->page, nr);
     130  static inline void page_ref_sub(struct page *page, int nr)
     132          atomic_sub(nr, &page->_refcount);
     134          __page_ref_mod(page, -nr);
     137  static inline void folio_ref_sub(struct folio *folio, int nr)
     139          page_ref_sub(&folio->page, nr);
     [all …]
|
| D | pagevec.h |
      29          unsigned char nr;                                /* member */
      43          fbatch->nr = 0;                                  /* in folio_batch_init() */
      50          fbatch->nr = 0;                                  /* in folio_batch_reinit() */
      56          return fbatch->nr;                               /* in folio_batch_count() */
      61          return PAGEVEC_SIZE - fbatch->nr;                /* in folio_batch_space() */
      77          fbatch->folios[fbatch->nr++] = folio;            /* in folio_batch_add() */
      91          if (fbatch->i == fbatch->nr)                     /* in folio_batch_next() */
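
Here nr is the fill level of a struct folio_batch. The usual pattern is to keep adding folios until folio_batch_add() reports no space left, process the batch, then reinit it; a rough sketch under that assumption (process_batch() is a hypothetical consumer, and the caller is assumed to already hold the folio references):

    #include <linux/pagevec.h>

    static void process_batch(struct folio_batch *fbatch);     /* hypothetical consumer */

    static void queue_folio(struct folio_batch *fbatch, struct folio *folio)
    {
            /* folio_batch_add() returns the space remaining after the add. */
            if (!folio_batch_add(fbatch, folio)) {
                    process_batch(fbatch);
                    folio_batch_reinit(fbatch);     /* nr back to 0 */
            }
    }

    static void drain(struct folio_batch *fbatch)
    {
            if (folio_batch_count(fbatch))          /* returns fbatch->nr */
                    process_batch(fbatch);
    }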
|
| D | quotaops.h |
     297  static inline int dquot_alloc_space_nodirty(struct inode *inode, qsize_t nr)
     299          return __dquot_alloc_space(inode, nr, DQUOT_SPACE_WARN);
     302  static inline void dquot_alloc_space_nofail(struct inode *inode, qsize_t nr)
     304          __dquot_alloc_space(inode, nr, DQUOT_SPACE_WARN|DQUOT_SPACE_NOFAIL);
     308  static inline int dquot_alloc_space(struct inode *inode, qsize_t nr)
     312          ret = dquot_alloc_space_nodirty(inode, nr);
     324  static inline int dquot_alloc_block_nodirty(struct inode *inode, qsize_t nr)
     326          return dquot_alloc_space_nodirty(inode, nr << inode->i_blkbits);
     329  static inline void dquot_alloc_block_nofail(struct inode *inode, qsize_t nr)
     331          dquot_alloc_space_nofail(inode, nr << inode->i_blkbits);
     [all …]
|
| D | virtio_balloon.h |
      18          void (*page_relinquish)(struct page *page, unsigned int nr);
      32  static inline void page_relinquish(struct page *page, unsigned int nr)
      36          return virtio_balloon_hyp_ops->page_relinquish(page, nr);
      48  static inline void page_relinquish(struct page *page, unsigned int nr) { }
|
| D | percpu-refcount.h |
     198  static inline void percpu_ref_get_many(struct percpu_ref *ref, unsigned long nr)
     205          this_cpu_add(*percpu_count, nr);
     207          atomic_long_add(nr, &ref->data->count);
     236                  unsigned long nr)                        /* in percpu_ref_tryget_many() */
     244          this_cpu_add(*percpu_count, nr);
     247          ret = atomic_long_add_unless(&ref->data->count, nr, 0);
     326  static inline void percpu_ref_put_many(struct percpu_ref *ref, unsigned long nr)
     333          this_cpu_sub(*percpu_count, nr);
     334          else if (unlikely(atomic_long_sub_and_test(nr, &ref->data->count)))
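
percpu_ref_get_many()/percpu_ref_put_many() take or drop nr references in one operation instead of looping over percpu_ref_get(). A hedged sketch of the batched pattern (the object, the release callback and the request flow are illustrative):

    #include <linux/percpu-refcount.h>

    static void my_release(struct percpu_ref *ref)
    {
            /* last reference gone: tear down the object embedding ref */
    }

    static int example(struct percpu_ref *ref, unsigned long nr_requests)
    {
            int err = percpu_ref_init(ref, my_release, 0, GFP_KERNEL);

            if (err)
                    return err;

            percpu_ref_get_many(ref, nr_requests);  /* one add for the whole batch */
            /* ... submit nr_requests operations ... */
            percpu_ref_put_many(ref, nr_requests);  /* drop them all once they complete */

            return 0;
    }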
|
| D | min_heap.h |
      18          int nr; \
      45          heap->nr = 0;                                    /* in __min_heap_init() */
      60          return heap->nr ? heap->data : NULL;             /* in __min_heap_peek() */
      70          return heap->nr == heap->size;                   /* in __min_heap_full() */
      88          if (i * 2 + 2 >= heap->nr)                       /* in __min_heap_sift_down() */
      96          if (i * 2 + 2 == heap->nr)                       /* in __min_heap_sift_down() */
     141          for (i = heap->nr / 2 - 1; i >= 0; i--)          /* in __min_heapify_all() */
     155          if (WARN_ONCE(heap->nr <= 0, "Popping an empty heap"))    /* in __min_heap_pop() */
     159          heap->nr--;                                      /* in __min_heap_pop() */
     160          memcpy(data, data + (heap->nr * elem_size), elem_size);   /* in __min_heap_pop() */
     [all …]
|
| D | sbitmap.h |
     257          unsigned int nr;                                 /* local in __sbitmap_for_each_set() */
     263          nr = SB_NR_TO_BIT(sb, start);                    /* in __sbitmap_for_each_set() */
     268                            __map_depth(sb, index) - nr,
     281          depth += nr;
     283          nr = find_next_bit(&word, depth, nr);
     284          if (nr >= depth)
     286          if (!fn(sb, (index << sb->shift) + nr, data))
     289          nr++;
     292          nr = 0;
     504          int nr;                                          /* local in sbitmap_queue_get() */
     [all …]
|
| D | udp.h |
     100  #define udp_test_bit(nr, sk) \
     101          test_bit(UDP_FLAGS_##nr, &udp_sk(sk)->udp_flags)
     102  #define udp_set_bit(nr, sk) \
     103          set_bit(UDP_FLAGS_##nr, &udp_sk(sk)->udp_flags)
     104  #define udp_test_and_set_bit(nr, sk) \
     105          test_and_set_bit(UDP_FLAGS_##nr, &udp_sk(sk)->udp_flags)
     106  #define udp_clear_bit(nr, sk) \
     107          clear_bit(UDP_FLAGS_##nr, &udp_sk(sk)->udp_flags)
     108  #define udp_assign_bit(nr, sk, val) \
     109          assign_bit(UDP_FLAGS_##nr, &udp_sk(sk)->udp_flags, val)
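
These macros token-paste nr onto UDP_FLAGS_, so callers pass the bare flag name rather than the enum constant. A small sketch using the real UDP_FLAGS_CORK flag (the wrapper functions themselves are illustrative):

    #include <linux/udp.h>

    /* udp_set_bit(CORK, sk) expands to set_bit(UDP_FLAGS_CORK, &udp_sk(sk)->udp_flags). */
    static void set_cork(struct sock *sk, bool on)
    {
            udp_assign_bit(CORK, sk, on);
    }

    static bool corked(struct sock *sk)
    {
            return udp_test_bit(CORK, sk);
    }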
|
| /include/vdso/ |
| D | bits.h |
       7  #define BIT(nr)      (UL(1) << (nr))
       8  #define BIT_ULL(nr)  (ULL(1) << (nr))
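
BIT()/BIT_ULL() are the single-word flag helpers (contrast BIT_MASK()/BIT_WORD() above, which address multiword bitmaps). A typical, purely hypothetical register layout for illustration:

    #include <linux/bits.h>
    #include <linux/types.h>

    /* hypothetical control/status register bits, for illustration only */
    #define CTRL_ENABLE       BIT(0)         /* 0x1  */
    #define CTRL_RESET        BIT(3)         /* 0x8  */
    #define CTRL_IRQ_EN       BIT(7)         /* 0x80 */
    #define STATUS_DMA_DONE   BIT_ULL(40)    /* bit 40 of a 64-bit status word */

    static inline u32 ctrl_default(void)
    {
            return CTRL_ENABLE | CTRL_IRQ_EN;
    }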
|
| /include/uapi/linux/ |
| D | seccomp.h |
      63          int nr;                                          /* member */
     141  #define SECCOMP_IO(nr)            _IO(SECCOMP_IOC_MAGIC, nr)
     142  #define SECCOMP_IOR(nr, type)     _IOR(SECCOMP_IOC_MAGIC, nr, type)
     143  #define SECCOMP_IOW(nr, type)     _IOW(SECCOMP_IOC_MAGIC, nr, type)
     144  #define SECCOMP_IOWR(nr, type)    _IOWR(SECCOMP_IOC_MAGIC, nr, type)
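
The SECCOMP_IO*() wrappers just fix the ioctl magic for the seccomp user-notification fd, so nr is only the per-command index. For illustration, a command passing a struct seccomp_notif in both directions could be declared like this (the name and the index 42 are made up, not one of the real SECCOMP_IOCTL_* values):

    #include <linux/seccomp.h>

    /* expands to _IOWR(SECCOMP_IOC_MAGIC, 42, struct seccomp_notif) */
    #define SECCOMP_IOCTL_EXAMPLE   SECCOMP_IOWR(42, struct seccomp_notif)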
|
| /include/xen/ |
| D | xen-ops.h |
      48                     xen_pfn_t *pfn, int nr, int *err_ptr, pgprot_t prot,
      52                     xen_pfn_t *pfn, int nr, int *err_ptr,   /* in xen_remap_pfn() */
      66                     xen_pfn_t *gfn, int nr,
      71                     int nr, struct page **pages);
      79                     xen_pfn_t *gfn, int nr,                 /* in xen_xlate_remap_gfn_array() */
      88                     int nr, struct page **pages)            /* in xen_xlate_unmap_gfn_range() */
     116                     xen_pfn_t *gfn, int nr,                 /* in xen_remap_domain_gfn_array() */
     122          return xen_xlate_remap_gfn_array(vma, addr, gfn, nr, err_ptr,
     130          return xen_remap_pfn(vma, addr, gfn, nr, err_ptr, prot, domid,
     152                     int nr, int *err_ptr,                   /* in xen_remap_domain_mfn_array() */
     [all …]
|
| /include/linux/dma/ |
| D | hsu.h |
      41  int hsu_dma_get_status(struct hsu_dma_chip *chip, unsigned short nr,
      43  int hsu_dma_do_irq(struct hsu_dma_chip *chip, unsigned short nr, u32 status);
      50                     unsigned short nr, u32 *status)         /* in hsu_dma_get_status() */
      54  static inline int hsu_dma_do_irq(struct hsu_dma_chip *chip, unsigned short nr,
|
| /include/net/page_pool/ |
| D | helpers.h |
     217  static inline void page_pool_fragment_netmem(netmem_ref netmem, long nr)
     219          atomic_long_set(netmem_get_pp_ref_count_ref(netmem), nr);
     240  static inline void page_pool_fragment_page(struct page *page, long nr)
     242          page_pool_fragment_netmem(page_to_netmem(page), nr);
     245  static inline long page_pool_unref_netmem(netmem_ref netmem, long nr)
     264          if (atomic_long_read(pp_ref_count) == nr) {
     269          BUILD_BUG_ON(__builtin_constant_p(nr) && nr != 1);
     270          if (!__builtin_constant_p(nr))
     276          ret = atomic_long_sub_return(nr, pp_ref_count);
     290  static inline long page_pool_unref_page(struct page *page, long nr)
     [all …]
|