/lib/ |
D | find_bit.c |
      32   unsigned long nbits, unsigned long start, unsigned long invert)   in _find_next_bit() argument
      36   if (!nbits || start >= nbits)   in _find_next_bit()
      39   tmp = addr[start / BITS_PER_LONG] ^ invert;   in _find_next_bit()
      42   tmp &= BITMAP_FIRST_WORD_MASK(start);   in _find_next_bit()
      43   start = round_down(start, BITS_PER_LONG);   in _find_next_bit()
      46   start += BITS_PER_LONG;   in _find_next_bit()
      47   if (start >= nbits)   in _find_next_bit()
      50   tmp = addr[start / BITS_PER_LONG] ^ invert;   in _find_next_bit()
      53   return min(start + __ffs(tmp), nbits);   in _find_next_bit()
     150   unsigned long nbits, unsigned long start, unsigned long invert)   in _find_next_bit_le() argument
     [all …]
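The hits above outline the word-at-a-time scan behind find_next_bit(): mask off the bits below 'start' in the first word, skip whole zero words, then take the lowest set bit. A minimal userspace sketch of that idea (not the kernel code; the 'invert' trick that shares the loop with find_next_zero_bit is omitted, and __builtin_ctzl stands in for __ffs()):

    #include <limits.h>
    #include <stddef.h>
    #include <stdio.h>

    #define WORD_BITS (sizeof(unsigned long) * CHAR_BIT)

    /* Simplified sketch: return the index of the next set bit at or after
     * 'start', or 'nbits' if there is none. */
    static size_t find_next_set_bit(const unsigned long *addr, size_t nbits, size_t start)
    {
        unsigned long tmp;

        if (start >= nbits)
            return nbits;                       /* nothing to search */

        tmp = addr[start / WORD_BITS];
        tmp &= ~0UL << (start % WORD_BITS);     /* drop bits below 'start' */
        start -= start % WORD_BITS;             /* round down to a word boundary */

        while (!tmp) {
            start += WORD_BITS;
            if (start >= nbits)
                return nbits;                   /* ran off the end */
            tmp = addr[start / WORD_BITS];
        }

        start += (size_t)__builtin_ctzl(tmp);   /* index of the lowest set bit */
        return start < nbits ? start : nbits;
    }

    int main(void)
    {
        unsigned long bits[2] = { 0, 1UL << 3 };              /* only bit 67 set (64-bit longs) */

        printf("%zu\n", find_next_set_bit(bits, 128, 10));    /* prints 67 */
        return 0;
    }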
|
D | iommu-common.c |
      58   unsigned int start, i;   in iommu_tbl_pool_init() local
      70   start = 0;   in iommu_tbl_pool_init()
      82   iommu->pools[i].start = start;   in iommu_tbl_pool_init()
      83   iommu->pools[i].hint = start;   in iommu_tbl_pool_init()
      84   start += iommu->poolsize; /* start for next pool */   in iommu_tbl_pool_init()
      85   iommu->pools[i].end = start - 1;   in iommu_tbl_pool_init()
      91   p->start = start;   in iommu_tbl_pool_init()
      92   p->hint = p->start;   in iommu_tbl_pool_init()
     105   unsigned long n, end, start, limit, boundary_size;   in iommu_tbl_range_alloc() local
     137   (*handle >= pool->start) && (*handle < pool->end))   in iommu_tbl_range_alloc()
     [all …]
|
D | pci_iomap.c |
      32   resource_size_t start = pci_resource_start(dev, bar);   in pci_iomap_range() local
      36   if (len <= offset || !start)   in pci_iomap_range()
      39   start += offset;   in pci_iomap_range()
      43   return __pci_ioport_map(dev, start, len);   in pci_iomap_range()
      45   return ioremap(start, len);   in pci_iomap_range()
      72   resource_size_t start = pci_resource_start(dev, bar);   in pci_iomap_wc_range() local
      80   if (len <= offset || !start)   in pci_iomap_wc_range()
      84   start += offset;   in pci_iomap_wc_range()
      89   return ioremap_wc(start, len);   in pci_iomap_wc_range()
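For context, a hedged usage sketch of the pci_iomap() family these helpers implement; the device, BAR number, and REG_STATUS offset are made up for illustration:

    #include <linux/pci.h>

    /* Hedged sketch (not taken from pci_iomap.c): map BAR 0 of a
     * hypothetical PCI device and read one register. */
    #define REG_STATUS 0x10                     /* hypothetical register offset */

    static int demo_probe(struct pci_dev *pdev, const struct pci_device_id *id)
    {
        void __iomem *regs;
        int ret;

        ret = pci_enable_device(pdev);
        if (ret)
            return ret;

        regs = pci_iomap(pdev, 0, 0);           /* maxlen 0 = map the whole BAR */
        if (!regs) {
            pci_disable_device(pdev);
            return -ENOMEM;
        }

        dev_info(&pdev->dev, "status: %#x\n", ioread32(regs + REG_STATUS));

        pci_iounmap(pdev, regs);
        pci_disable_device(pdev);
        return 0;
    }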
|
D | bsearch.c |
      37   size_t start = 0, end = num;   in bsearch() local
      40   while (start < end) {   in bsearch()
      41   size_t mid = start + (end - start) / 2;   in bsearch()
      47   start = mid + 1;   in bsearch()
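The 'start + (end - start) / 2' midpoint above avoids the overflow that '(start + end) / 2' can hit for large ranges. A standalone sketch of the same half-open-interval binary search with a comparator (bsearch_sketch and cmp_int are illustrative names, not the library code):

    #include <stddef.h>
    #include <stdio.h>

    /* Overflow-safe binary search over [0, num) with a user comparator. */
    static void *bsearch_sketch(const void *key, const void *base, size_t num,
                                size_t size, int (*cmp)(const void *, const void *))
    {
        size_t start = 0, end = num;

        while (start < end) {
            size_t mid = start + (end - start) / 2;
            const void *p = (const char *)base + mid * size;
            int r = cmp(key, p);

            if (r < 0)
                end = mid;              /* key is in the lower half */
            else if (r > 0)
                start = mid + 1;        /* key is in the upper half */
            else
                return (void *)p;       /* exact match */
        }
        return NULL;
    }

    static int cmp_int(const void *a, const void *b)
    {
        int x = *(const int *)a, y = *(const int *)b;

        return (x > y) - (x < y);       /* avoids subtraction overflow */
    }

    int main(void)
    {
        int v[] = { 1, 3, 5, 7, 9 };
        int key = 7;
        int *hit = bsearch_sketch(&key, v, 5, sizeof(v[0]), cmp_int);

        printf("found: %s\n", hit ? "yes" : "no");   /* prints "found: yes" */
        return 0;
    }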
|
D | genalloc.c |
      86   static int bitmap_set_ll(unsigned long *map, unsigned long start, unsigned long nr)   in bitmap_set_ll() argument
      88   unsigned long *p = map + BIT_WORD(start);   in bitmap_set_ll()
      89   const unsigned long size = start + nr;   in bitmap_set_ll()
      90   int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);   in bitmap_set_ll()
      91   unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);   in bitmap_set_ll()
     122   bitmap_clear_ll(unsigned long *map, unsigned long start, unsigned long nr)   in bitmap_clear_ll() argument
     124   unsigned long *p = map + BIT_WORD(start);   in bitmap_clear_ll()
     125   const unsigned long size = start + nr;   in bitmap_clear_ll()
     126   int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);   in bitmap_clear_ll()
     127   unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);   in bitmap_clear_ll()
     [all …]
|
D | crc32.c |
     866   u32 start; /* random 6 bit offset in buf */   member
     982   struct timespec start, stop;   local
     995   test[i].start, test[i].length);
    1002   getnstimeofday(&start);
    1005   test[i].start, test[i].length))
    1013   nsec = stop.tv_nsec - start.tv_nsec +
    1014   1000000000 * (stop.tv_sec - start.tv_sec);
    1036   crc_full = __crc32c_le(test[i].crc, test_buf + test[i].start,
    1043   test[i].start, len1);
    1044   crc2 = __crc32c_le(0, test_buf + test[i].start +
     [all …]
|
D | interval_tree_test.c |
      29   search(struct rb_root *root, unsigned long start, unsigned long last)   in search() argument
      34   for (node = interval_tree_iter_first(root, start, last); node;   in search()
      35   node = interval_tree_iter_next(node, start, last))   in search()
      48   nodes[i].start = a;   in init()
     107   unsigned long start = search_all ? 0 : queries[j];   in interval_tree_test_init() local
     110   results += search(&root, start, last);   in interval_tree_test_init()
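The query loop above reports every node whose closed interval [start, last] overlaps the query range. A tiny sketch of just that overlap test on a plain array (the rbtree only adds O(log n) pruning on top of it); the struct and values are illustrative:

    #include <stdio.h>

    /* Closed intervals [start, last]; two ranges overlap iff each one
     * starts no later than the other one ends. */
    struct range { unsigned long start, last; };

    static int overlaps(const struct range *a, unsigned long start, unsigned long last)
    {
        return a->start <= last && start <= a->last;
    }

    int main(void)
    {
        struct range nodes[] = { { 0, 4 }, { 5, 9 }, { 20, 30 } };
        unsigned long q_start = 6, q_last = 25;
        unsigned long hits = 0;

        /* A linear scan gives the same answer a tree query would,
         * just without the pruning. */
        for (unsigned int i = 0; i < sizeof(nodes) / sizeof(nodes[0]); i++)
            if (overlaps(&nodes[i], q_start, q_last))
                hits++;

        printf("%lu overlapping intervals\n", hits);   /* prints 2 */
        return 0;
    }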
|
D | iommu-helper.c |
      20   unsigned long start, unsigned int nr,   in iommu_area_alloc() argument
      29   index = bitmap_find_next_zero_area(map, size, start, nr, align_mask);   in iommu_area_alloc()
      33   start = index + 1;   in iommu_area_alloc()
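iommu_area_alloc() keeps asking bitmap_find_next_zero_area() for a run of 'nr' free bits and, when a candidate is rejected, retries from index + 1. A simplified userspace sketch of the underlying "find a run of free slots at or after start" search (alignment masks, boundary checks and the retry loop are left out; the names are made up):

    #include <stdbool.h>
    #include <stdio.h>

    #define MAP_BITS 32

    /* Return the first index >= start where 'nr' consecutive slots are
     * free, or -1 if no such run exists. */
    static int find_zero_area(const bool *map, int size, int start, int nr)
    {
        for (int i = start; i + nr <= size; i++) {
            int j;

            for (j = 0; j < nr && !map[i + j]; j++)
                ;
            if (j == nr)
                return i;               /* found nr consecutive free slots */
        }
        return -1;
    }

    int main(void)
    {
        bool map[MAP_BITS] = { 0 };

        map[4] = map[5] = true;                                 /* two busy slots */
        printf("%d\n", find_zero_area(map, MAP_BITS, 2, 4));    /* prints 6 */
        return 0;
    }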
|
D | string.c |
     990   static void *check_bytes8(const u8 *start, u8 value, unsigned int bytes)   in check_bytes8() argument
     993   if (*start != value)   in check_bytes8()
     994   return (void *)start;   in check_bytes8()
     995   start++;   in check_bytes8()
    1010   void *memchr_inv(const void *start, int c, size_t bytes)   in memchr_inv() argument
    1017   return check_bytes8(start, value, bytes);   in memchr_inv()
    1031   prefix = (unsigned long)start % 8;   in memchr_inv()
    1036   r = check_bytes8(start, value, prefix);   in memchr_inv()
    1039   start += prefix;   in memchr_inv()
    1046   if (*(u64 *)start != value64)   in memchr_inv()
     [all …]
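memchr_inv() returns a pointer to the first byte that differs from the given value, or NULL if the whole area matches. A byte-at-a-time sketch of that contract (the kernel adds the word-at-a-time fast path visible in the prefix/value64 hits above; this sketch keeps only the slow path):

    #include <stddef.h>
    #include <stdio.h>

    /* Hedged sketch: find the first byte that is NOT equal to 'c'. */
    static void *memchr_inv_sketch(const void *start, int c, size_t bytes)
    {
        const unsigned char *p = start;
        unsigned char value = (unsigned char)c;

        while (bytes--) {
            if (*p != value)
                return (void *)p;       /* first mismatching byte */
            p++;
        }
        return NULL;                    /* every byte matched */
    }

    int main(void)
    {
        char buf[8] = { 0, 0, 0, 0, 0, 0, 7, 0 };

        printf("first non-zero at offset %td\n",
               (char *)memchr_inv_sketch(buf, 0, sizeof(buf)) - buf);   /* 6 */
        return 0;
    }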
|
D | bitmap.c |
     253   void bitmap_set(unsigned long *map, unsigned int start, int len)   in bitmap_set() argument
     255   unsigned long *p = map + BIT_WORD(start);   in bitmap_set()
     256   const unsigned int size = start + len;   in bitmap_set()
     257   int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);   in bitmap_set()
     258   unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);   in bitmap_set()
     274   void bitmap_clear(unsigned long *map, unsigned int start, int len)   in bitmap_clear() argument
     276   unsigned long *p = map + BIT_WORD(start);   in bitmap_clear()
     277   const unsigned int size = start + len;   in bitmap_clear()
     278   int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);   in bitmap_clear()
     279   unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);   in bitmap_clear()
     [all …]
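bitmap_set()/bitmap_clear() handle a possibly partial first word via BITMAP_FIRST_WORD_MASK(start), whole words in the middle, and a masked last word. A standalone sketch of the set side of that pattern (WORD_BITS and the local masks are stand-ins for the kernel macros, not the kernel code itself):

    #include <limits.h>
    #include <stdio.h>

    #define WORD_BITS ((int)(sizeof(unsigned long) * CHAR_BIT))

    /* Set 'len' bits starting at bit 'start': partial first word, whole
     * middle words, then a masked partial last word. */
    static void bitmap_set_sketch(unsigned long *map, unsigned int start, int len)
    {
        unsigned long *p = map + start / WORD_BITS;
        const unsigned int size = start + len;
        int bits_to_set = WORD_BITS - (start % WORD_BITS);
        unsigned long mask_to_set = ~0UL << (start % WORD_BITS);

        while (len - bits_to_set >= 0) {
            *p |= mask_to_set;
            len -= bits_to_set;
            bits_to_set = WORD_BITS;
            mask_to_set = ~0UL;
            p++;
        }
        if (len) {
            /* keep only the bits below 'size' in the last word */
            mask_to_set &= ~0UL >> ((WORD_BITS - size % WORD_BITS) % WORD_BITS);
            *p |= mask_to_set;
        }
    }

    int main(void)
    {
        unsigned long bits[2] = { 0, 0 };

        bitmap_set_sketch(bits, 60, 8);           /* spans the word boundary */
        printf("%#lx %#lx\n", bits[0], bits[1]);  /* 0xf000000000000000 0xf on 64-bit */
        return 0;
    }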
|
D | extable.c |
      66   void sort_extable(struct exception_table_entry *start,   in sort_extable() argument
      69   sort(start, finish - start, sizeof(struct exception_table_entry),   in sort_extable()
|
D | nlattr.c |
     369   void *start;   in __nla_reserve_nohdr() local
     371   start = skb_put(skb, NLA_ALIGN(attrlen));   in __nla_reserve_nohdr()
     372   memset(start, 0, NLA_ALIGN(attrlen));   in __nla_reserve_nohdr()
     374   return start;   in __nla_reserve_nohdr()
     449   void *start;   in __nla_put_nohdr() local
     451   start = __nla_reserve_nohdr(skb, attrlen);   in __nla_put_nohdr()
     452   memcpy(start, data, attrlen);   in __nla_put_nohdr()
|
D | cpumask.c |
      57   int cpumask_next_wrap(int n, const struct cpumask *mask, int start, bool wrap)   in cpumask_next_wrap() argument
      64   if (wrap && n < start && next >= start) {   in cpumask_next_wrap()
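The 'wrap && n < start && next >= start' test above is what stops a wrapped walk after exactly one lap, so no CPU is visited twice. A standalone sketch of the same idea on a plain flag array (next_set() stands in for cpumask_next(), NBITS for nr_cpumask_bits; none of this is the kernel code):

    #include <stdbool.h>
    #include <stdio.h>

    #define NBITS 8

    /* Next set index strictly after n, or NBITS if none. */
    static int next_set(int n, const bool *mask)
    {
        for (int i = n + 1; i < NBITS; i++)
            if (mask[i])
                return i;
        return NBITS;
    }

    static int next_wrap(int n, const bool *mask, int start, bool wrap)
    {
        int next;

    again:
        next = next_set(n, mask);

        if (wrap && n < start && next >= start)
            return NBITS;           /* completed one full lap: done */
        if (next >= NBITS) {
            wrap = true;            /* hit the end: restart from the front */
            n = -1;
            goto again;
        }
        return next;
    }

    int main(void)
    {
        bool mask[NBITS] = { 0, 1, 0, 1, 0, 1, 0, 0 };
        int start = 4;

        /* Visit every set index exactly once, beginning at 'start'. */
        for (int i = next_wrap(start - 1, mask, start, false);
             i < NBITS;
             i = next_wrap(i, mask, start, true))
            printf("%d ", i);       /* prints: 5 1 3 */
        printf("\n");
        return 0;
    }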
|
D | nmi_backtrace.c |
      39   static void print_seq_line(struct nmi_seq_buf *s, int start, int end)   in print_seq_line() argument
      41   const char *buf = s->buffer + start;   in print_seq_line()
      43   printk("%.*s", (end - start) + 1, buf);   in print_seq_line()
|
D | flex_array.c |
     268   int flex_array_prealloc(struct flex_array *fa, unsigned int start,   in flex_array_prealloc() argument
     277   if (!start && !nr_elements)   in flex_array_prealloc()
     279   if (start >= fa->total_nr_elements)   in flex_array_prealloc()
     284   end = start + nr_elements - 1;   in flex_array_prealloc()
     292   start_part = fa_element_to_part_nr(fa, start);   in flex_array_prealloc()
|
D | ioremap.c |
     129   unsigned long start;   in ioremap_page_range() local
     135   start = addr;   in ioremap_page_range()
     145   flush_cache_vmap(start, end);   in ioremap_page_range()
|
D | idr.c |
     450   int idr_alloc(struct idr *idr, void *ptr, int start, int end, gfp_t gfp_mask)   in idr_alloc() argument
     459   if (WARN_ON_ONCE(start < 0))   in idr_alloc()
     461   if (unlikely(max < start))   in idr_alloc()
     465   id = idr_get_empty_slot(idr, start, pa, gfp_mask, NULL);   in idr_alloc()
     488   int idr_alloc_cyclic(struct idr *idr, void *ptr, int start, int end,   in idr_alloc_cyclic() argument
     493   id = idr_alloc(idr, ptr, max(start, idr->cur), end, gfp_mask);   in idr_alloc_cyclic()
     495   id = idr_alloc(idr, ptr, start, end, gfp_mask);   in idr_alloc_cyclic()
    1078   int ida_simple_get(struct ida *ida, unsigned int start, unsigned int end,   in ida_simple_get() argument
    1085   BUG_ON((int)start < 0);   in ida_simple_get()
    1091   BUG_ON(end < start);   in ida_simple_get()
     [all …]
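A hedged usage sketch of the idr_alloc() API listed above, allocating IDs in [1, 100) for a made-up object type; the return value follows the usual negative-errno convention, and whatever locking the caller needs around the idr is omitted:

    #include <linux/idr.h>
    #include <linux/gfp.h>

    /* demo_idr, demo_obj, demo_register and demo_unregister are
     * hypothetical names for illustration only. */
    static DEFINE_IDR(demo_idr);

    struct demo_obj {
        int id;
    };

    static int demo_register(struct demo_obj *obj)
    {
        int id = idr_alloc(&demo_idr, obj, 1, 100, GFP_KERNEL);

        if (id < 0)
            return id;          /* -ENOMEM or -ENOSPC */
        obj->id = id;           /* the ID now maps back to 'obj' */
        return 0;
    }

    static void demo_unregister(struct demo_obj *obj)
    {
        idr_remove(&demo_idr, obj->id);
    }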
|
D | interval_tree.c |
       6   #define START(node) ((node)->start)
|
D | test_rhashtable.c |
     161   s64 start, end;   in test_rhashtable() local
     168   start = ktime_get_ns();   in test_rhashtable()
     212   pr_info(" Duration of test: %lld ns\n", end - start);   in test_rhashtable()
     214   return end - start;   in test_rhashtable()
|
D | devres.c |
     151   if (!devm_request_mem_region(dev, res->start, size, name)) {   in devm_ioremap_resource()
     156   dest_ptr = devm_ioremap(dev, res->start, size);   in devm_ioremap_resource()
     159   devm_release_mem_region(dev, res->start, size);   in devm_ioremap_resource()
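The hits above are the request_mem_region + ioremap + cleanup-on-failure steps inside devm_ioremap_resource(). A hedged sketch of how a platform driver typically consumes it (demo_probe and the device itself are hypothetical):

    #include <linux/platform_device.h>
    #include <linux/io.h>
    #include <linux/err.h>

    static int demo_probe(struct platform_device *pdev)
    {
        struct resource *res;
        void __iomem *base;

        res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
        base = devm_ioremap_resource(&pdev->dev, res);
        if (IS_ERR(base))
            return PTR_ERR(base);       /* -EBUSY, -ENOMEM, or -EINVAL */

        /* 'base' is valid for readl()/writel() and is released and
         * unmapped automatically when the device is unbound. */
        dev_info(&pdev->dev, "mapped %pR\n", res);
        return 0;
    }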
|
/lib/raid6/ |
D | sse2.c |
      92   static void raid6_sse21_xor_syndrome(int disks, int start, int stop,   in raid6_sse21_xor_syndrome() argument
     112   for ( z = z0-1 ; z >= start ; z-- ) {   in raid6_sse21_xor_syndrome()
     123   for ( z = start-1 ; z >= 0 ; z-- ) {   in raid6_sse21_xor_syndrome()
     203   static void raid6_sse22_xor_syndrome(int disks, int start, int stop,   in raid6_sse22_xor_syndrome() argument
     226   for ( z = z0-1 ; z >= start ; z-- ) {   in raid6_sse22_xor_syndrome()
     245   for ( z = start-1 ; z >= 0 ; z-- ) {   in raid6_sse22_xor_syndrome()
     369   static void raid6_sse24_xor_syndrome(int disks, int start, int stop,   in raid6_sse24_xor_syndrome() argument
     398   for ( z = z0-1 ; z >= start ; z-- ) {   in raid6_sse24_xor_syndrome()
     437   for ( z = start-1 ; z >= 0 ; z-- ) {   in raid6_sse24_xor_syndrome()
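These SSE2 routines vectorize the RAID-6 P/Q math 16, 32 or 64 bytes at a time. For orientation, a byte-wise scalar sketch of that math: P is the XOR of the data blocks, and Q accumulates a multiply-by-2 in GF(2^8) (reduction constant 0x1d) per disk. The incremental [start, stop] update that the _xor_syndrome variants apply to an existing P/Q pair is omitted here; this is the full-syndrome form only.

    #include <stdint.h>
    #include <stddef.h>
    #include <stdio.h>

    /* Multiply by 2 in GF(2^8) with the RAID-6 reduction constant 0x1d. */
    static uint8_t gf_mul2(uint8_t v)
    {
        return (uint8_t)((v << 1) ^ ((v & 0x80) ? 0x1d : 0));
    }

    /* ptrs[0..disks-3] are data blocks, ptrs[disks-2] is P, ptrs[disks-1] is Q. */
    static void gen_syndrome_sketch(int disks, size_t bytes, uint8_t **ptrs)
    {
        uint8_t *p = ptrs[disks - 2];
        uint8_t *q = ptrs[disks - 1];
        int z0 = disks - 3;                     /* highest data disk */

        for (size_t d = 0; d < bytes; d++) {
            uint8_t wp = ptrs[z0][d];
            uint8_t wq = ptrs[z0][d];

            for (int z = z0 - 1; z >= 0; z--) {
                wp ^= ptrs[z][d];               /* P: running XOR */
                wq = gf_mul2(wq) ^ ptrs[z][d];  /* Q: Horner evaluation over GF(2^8) */
            }
            p[d] = wp;
            q[d] = wq;
        }
    }

    int main(void)
    {
        uint8_t d0[4] = { 1, 2, 3, 4 }, d1[4] = { 5, 6, 7, 8 }, p[4], q[4];
        uint8_t *ptrs[4] = { d0, d1, p, q };

        gen_syndrome_sketch(4, sizeof(d0), ptrs);
        printf("P[0]=%02x Q[0]=%02x\n", p[0], q[0]);   /* P[0]=04 Q[0]=0b */
        return 0;
    }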
|
D | neon.c |
      44   int start, int stop, \
      51   start, stop, (unsigned long)bytes, ptrs); \
|
D | neon.uc |
      83   void raid6_neon$#_xor_syndrome_real(int disks, int start, int stop,
     102   for ( z = z0-1 ; z >= start ; z-- ) {
     113   for ( z = start-1 ; z >= 0 ; z-- ) {
|
/lib/zlib_inflate/ |
D | inffast.h |
      11   void inflate_fast (z_streamp strm, unsigned start);
|
/lib/raid6/test/ |
D | test.c |
      31   static void makedata(int start, int stop)   in makedata() argument
      35   for (i = start; i <= stop; i++) {   in makedata()
|