
Searched refs:n (Results 1 – 25 of 44) sorted by relevance


/lib/
klist.c
95 static void add_head(struct klist *k, struct klist_node *n) in add_head() argument
98 list_add(&n->n_node, &k->k_list); in add_head()
102 static void add_tail(struct klist *k, struct klist_node *n) in add_tail() argument
105 list_add_tail(&n->n_node, &k->k_list); in add_tail()
109 static void klist_node_init(struct klist *k, struct klist_node *n) in klist_node_init() argument
111 INIT_LIST_HEAD(&n->n_node); in klist_node_init()
112 kref_init(&n->n_ref); in klist_node_init()
113 knode_set_klist(n, k); in klist_node_init()
115 k->get(n); in klist_node_init()
123 void klist_add_head(struct klist_node *n, struct klist *k) in klist_add_head() argument
[all …]
idr.c
223 int n, m, sh; in sub_alloc() local
236 n = (id >> (IDR_BITS*l)) & IDR_MASK; in sub_alloc()
237 m = find_next_zero_bit(p->bitmap, IDR_SIZE, n); in sub_alloc()
261 if (m != n) { in sub_alloc()
263 id = ((id >> sh) ^ n ^ m) << sh; in sub_alloc()
514 int n; in sub_remove() local
520 n = (id >> shift) & IDR_MASK; in sub_remove()
521 __clear_bit(n, p->bitmap); in sub_remove()
522 *++paa = &p->ary[n]; in sub_remove()
523 p = p->ary[n]; in sub_remove()
[all …]
oid_registry.c
114 unsigned char n; in sprint_oid() local
121 n = *v++; in sprint_oid()
122 ret = count = snprintf(buffer, bufsize, "%u.%u", n / 40, n % 40); in sprint_oid()
130 n = *v++; in sprint_oid()
131 if (!(n & 0x80)) { in sprint_oid()
132 num = n; in sprint_oid()
134 num = n & 0x7f; in sprint_oid()
138 n = *v++; in sprint_oid()
140 num |= n & 0x7f; in sprint_oid()
141 } while (n & 0x80); in sprint_oid()
cpumask.c
16 int cpumask_next_and(int n, const struct cpumask *src1p, in cpumask_next_and() argument
19 while ((n = cpumask_next(n, src1p)) < nr_cpu_ids) in cpumask_next_and()
20 if (cpumask_test_cpu(n, src2p)) in cpumask_next_and()
22 return n; in cpumask_next_and()
57 int cpumask_next_wrap(int n, const struct cpumask *mask, int start, bool wrap) in cpumask_next_wrap() argument
62 next = cpumask_next(n, mask); in cpumask_next_wrap()
64 if (wrap && n < start && next >= start) { in cpumask_next_wrap()
69 n = -1; in cpumask_next_wrap()
inflate.c
142 ush n; /* literal, length base, or distance base */ member
233 #define NEEDBITS(n) {while(k<(n)){b|=((ulg)NEXTBYTE())<<k;k+=8;}} argument
234 #define DUMPBITS(n) {b>>=(n);k-=(n);} argument
323 unsigned n, /* number of codes (assumed <= N_MAX) */ in huft_build() argument
374 p = b; i = n; in huft_build()
376 Tracecv(*p, (stderr, (n-i >= ' ' && n-i <= '~' ? "%c %d\n" : "0x%x %d\n"), in huft_build()
377 n-i, *p)); in huft_build()
381 if (c[0] == n) /* null input--all zero length codes */ in huft_build()
437 } while (++i < n); in huft_build()
438 n = x[g]; /* set n to length of v */ in huft_build()
[all …]
iov_iter.c
8 #define iterate_iovec(i, n, __v, __p, skip, STEP) { \ argument
10 size_t wanted = n; \
12 __v.iov_len = min(n, __p->iov_len - skip); \
18 n -= __v.iov_len; \
22 while (unlikely(!left && n)) { \
24 __v.iov_len = min(n, __p->iov_len); \
31 n -= __v.iov_len; \
33 n = wanted - n; \
36 #define iterate_kvec(i, n, __v, __p, skip, STEP) { \ argument
37 size_t wanted = n; \
[all …]
kfifo.c
312 int n; in setup_sgl_buf() local
323 n = 0; in setup_sgl_buf()
337 if (++n == nents || sgl == NULL) in setup_sgl_buf()
338 return n; in setup_sgl_buf()
345 return n + 1; in setup_sgl_buf()
354 unsigned int n; in setup_sgl() local
364 n = setup_sgl_buf(sgl, fifo->data + off, nents, l); in setup_sgl()
365 n += setup_sgl_buf(sgl + n, fifo->data, nents - n, len - l); in setup_sgl()
367 return n; in setup_sgl()
435 static void __kfifo_poke_n(struct __kfifo *fifo, unsigned int n, size_t recsize) in __kfifo_poke_n() argument
[all …]
div64.c
26 uint32_t __attribute__((weak)) __div64_32(uint64_t *n, uint32_t base) in __div64_32() argument
28 uint64_t rem = *n; in __div64_32()
55 *n = res; in __div64_32()
103 int n = fls(high); in div64_u64_rem() local
104 quot = div_u64(dividend >> n, divisor >> n); in div64_u64_rem()
141 int n = fls(high); in div64_u64() local
142 quot = div_u64(dividend >> n, divisor >> n); in div64_u64()
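
The div64_u64() and div64_u64_rem() hits above (lines 103-104 and 141-142) show the usual trick for dividing two 64-bit values when only a 64-by-32-bit divide primitive is cheap: shift both operands right by n = fls(high half of the divisor) so the divisor fits in 32 bits, take that quotient as an estimate, and correct it by at most one. A minimal user-space sketch of the same idea follows; the names div64_u64_est and fls32 are invented for illustration and are not kernel API.

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the kernel's fls(): index of the highest set bit, 1-based, 0 for 0. */
static int fls32(uint32_t x)
{
	int r = 0;
	while (x) {
		x >>= 1;
		r++;
	}
	return r;
}

/* Hypothetical sketch of the shift-and-correct scheme the div64_u64() lines hint at. */
static uint64_t div64_u64_est(uint64_t dividend, uint64_t divisor)
{
	uint32_t high = divisor >> 32;
	uint64_t quot;

	if (high == 0) {
		quot = dividend / (uint32_t)divisor;	/* divisor already fits in 32 bits */
	} else {
		int n = fls32(high);			/* shrink the divisor below 2^32 */
		quot = (dividend >> n) / (uint32_t)(divisor >> n);
		if (quot != 0)
			quot--;				/* the estimate may be one too large... */
		if (dividend - quot * divisor >= divisor)
			quot++;				/* ...so nudge it back up if needed */
	}
	return quot;
}

int main(void)
{
	uint64_t a = 0xFFFFFFFFFFFFFFFFull, b = 0x100000001ull;

	/* Both lines should print the same quotient. */
	printf("%llu\n", (unsigned long long)div64_u64_est(a, b));
	printf("%llu\n", (unsigned long long)(a / b));
	return 0;
}
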
rational.c
35 unsigned long n, d, n0, d0, n1, d1; in rational_best_approximation() local
36 n = given_numerator; in rational_best_approximation()
50 a = n / d; in rational_best_approximation()
51 d = n % d; in rational_best_approximation()
52 n = t; in rational_best_approximation()
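
The rational_best_approximation() lines above (50-52) are the Euclidean step of a continued-fraction expansion: each quotient a = n / d becomes the next term, and successive convergents are accumulated until one would exceed the caller's numerator or denominator limit. A rough user-space sketch of that loop, using the invented name best_ratio and without the kernel helper's more careful limit handling:

#include <stdio.h>

/* Hypothetical sketch: best rational approximation via continued-fraction convergents. */
static void best_ratio(unsigned long num, unsigned long den,
		       unsigned long max_num, unsigned long max_den,
		       unsigned long *out_num, unsigned long *out_den)
{
	unsigned long n = num, d = den;
	unsigned long n0 = 0, n1 = 1;	/* previous / current convergent numerators */
	unsigned long d0 = 1, d1 = 0;	/* previous / current convergent denominators */

	while (d != 0) {
		unsigned long a = n / d;	/* next continued-fraction term */
		unsigned long r = n % d;	/* Euclidean step, as in the snippet above */
		unsigned long n2 = n0 + a * n1;	/* next convergent numerator */
		unsigned long d2 = d0 + a * d1;	/* next convergent denominator */

		if (n2 > max_num || d2 > max_den)
			break;			/* keep the last convergent within bounds */
		n = d; d = r;
		n0 = n1; n1 = n2;
		d0 = d1; d1 = d2;
	}
	*out_num = n1;
	*out_den = d1;
}

int main(void)
{
	unsigned long p, q;

	best_ratio(314159, 100000, 1000, 1000, &p, &q);
	printf("%lu/%lu\n", p, q);	/* prints 355/113, the classic approximation of pi */
	return 0;
}
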
glob.c
253 unsigned n = 0; in glob_init() local
271 n++; in glob_init()
274 n -= successes; in glob_init()
275 printk(message, successes, n); in glob_init()
278 return n ? -ECANCELED : 0; in glob_init()
fault-inject.c
71 int n; in fail_stacktrace() local
83 for (n = 0; n < trace.nr_entries; n++) { in fail_stacktrace()
84 if (attr->reject_start <= entries[n] && in fail_stacktrace()
85 entries[n] < attr->reject_end) in fail_stacktrace()
87 if (attr->require_start <= entries[n] && in fail_stacktrace()
88 entries[n] < attr->require_end) in fail_stacktrace()
asn1_decoder.c
65 size_t dp = *_dp, len, n; in asn1_find_indefinite_length() local
110 n = len - 0x80; in asn1_find_indefinite_length()
111 if (unlikely(n > sizeof(len) - 1)) in asn1_find_indefinite_length()
113 if (unlikely(n > datalen - dp)) in asn1_find_indefinite_length()
116 for (; n > 0; n--) { in asn1_find_indefinite_length()
273 int n = len - 0x80; in asn1_ber_decoder() local
274 if (unlikely(n > 2)) in asn1_ber_decoder()
276 if (unlikely(n > datalen - dp)) in asn1_ber_decoder()
278 hdr += n; in asn1_ber_decoder()
279 for (len = 0; n > 0; n--) { in asn1_ber_decoder()
rbtree.c
452 struct rb_node *n; in rb_first() local
454 n = root->rb_node; in rb_first()
455 if (!n) in rb_first()
457 while (n->rb_left) in rb_first()
458 n = n->rb_left; in rb_first()
459 return n; in rb_first()
465 struct rb_node *n; in rb_last() local
467 n = root->rb_node; in rb_last()
468 if (!n) in rb_last()
470 while (n->rb_right) in rb_last()
[all …]
strnlen_user.c
9 # define aligned_byte_mask(n) ((1ul << 8*(n))-1) argument
11 # define aligned_byte_mask(n) (~0xfful << (BITS_PER_LONG - 8 - 8*(n))) argument
bch.c
84 #define GF_N(_p) ((_p)->n)
258 const unsigned int n = GF_N(bch); in modulo() local
259 while (v >= n) { in modulo()
260 v -= n; in modulo()
261 v = (v & n) + (v >> GF_M(bch)); in modulo()
271 const unsigned int n = GF_N(bch); in mod_s() local
272 return (v < n) ? v : v-n; in mod_s()
380 const unsigned int n = GF_N(bch); in compute_error_locator_polynomial() local
401 tmp = a_log(bch, d)+n-a_log(bch, pd); in compute_error_locator_polynomial()
548 int n = 0; in find_poly_deg1_roots() local
[all …]
iommu-common.c
105 unsigned long n, end, start, limit, boundary_size; in iommu_tbl_range_alloc() local
185 n = iommu_area_alloc(iommu->map, limit, start, npages, shift, in iommu_tbl_range_alloc()
187 if (n == -1) { in iommu_tbl_range_alloc()
205 n = IOMMU_ERROR_CODE; in iommu_tbl_range_alloc()
210 (n < pool->hint || need_flush(iommu))) { in iommu_tbl_range_alloc()
215 end = n + npages; in iommu_tbl_range_alloc()
224 return n; in iommu_tbl_range_alloc()
plist.c
34 struct list_head *n) in plist_check_prev_next() argument
36 WARN(n->prev != p || p->next != n, in plist_check_prev_next()
42 n, n->next, n->prev); in plist_check_prev_next()
sort.c
64 int i = (num/2 - 1) * size, n = num * size, c, r; in sort() local
77 for (r = i; r * 2 + size < n; r = c) { in sort()
79 if (c < n - size && in sort()
89 for (i = n - size; i > 0; i -= size) { in sort()
/lib/zlib_deflate/
deftree.c
228 int n; /* iterates over tree elements */ in tr_static_init() local
242 for (n = 0; n < (1<<extra_lbits[code]); n++) { in tr_static_init()
257 for (n = 0; n < (1<<extra_dbits[code]); n++) { in tr_static_init()
265 for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) { in tr_static_init()
273 n = 0; in tr_static_init()
274 while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++; in tr_static_init()
275 while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++; in tr_static_init()
276 while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++; in tr_static_init()
277 while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++; in tr_static_init()
285 for (n = 0; n < D_CODES; n++) { in tr_static_init()
[all …]
/lib/mpi/
mpi-bit.c
41 unsigned n; in mpi_get_nbits() local
48 n = count_leading_zeros(alimb); in mpi_get_nbits()
50 n = BITS_PER_MPI_LIMB; in mpi_get_nbits()
51 n = BITS_PER_MPI_LIMB - n + (a->nlimbs - 1) * BITS_PER_MPI_LIMB; in mpi_get_nbits()
53 n = 0; in mpi_get_nbits()
54 return n; in mpi_get_nbits()
mpi-internal.h
76 #define MPN_COPY(d, s, n) \ argument
79 for (_i = 0; _i < (n); _i++) \
83 #define MPN_COPY_INCR(d, s, n) \ argument
86 for (_i = 0; _i < (n); _i++) \
90 #define MPN_COPY_DECR(d, s, n) \ argument
93 for (_i = (n)-1; _i >= 0; _i--) \
98 #define MPN_ZERO(d, n) \ argument
101 for (_i = 0; _i < (n); _i++) \
105 #define MPN_NORMALIZE(d, n) \ argument
107 while ((n) > 0) { \
[all …]
mpicoder.c
165 unsigned int n = mpi_get_size(a); in mpi_read_buffer() local
168 if (buf_len < n || !buf || !nbytes) in mpi_read_buffer()
177 *nbytes = n - lzeros; in mpi_read_buffer()
231 unsigned int n; in mpi_get_buffer() local
237 n = mpi_get_size(a); in mpi_get_buffer()
239 if (!n) in mpi_get_buffer()
240 n++; in mpi_get_buffer()
242 buf = kmalloc(n, GFP_KERNEL); in mpi_get_buffer()
247 ret = mpi_read_buffer(a, buf, n, nbytes, sign); in mpi_get_buffer()
355 unsigned int n = mpi_get_size(a); in mpi_write_to_sgl() local
[all …]
/lib/842/
842_decompress.c
74 static int next_bits(struct sw842_param *p, u64 *d, u8 n);
76 static int __split_next_bits(struct sw842_param *p, u64 *d, u8 n, u8 s) in __split_next_bits() argument
81 if (n <= s) { in __split_next_bits()
82 pr_debug("split_next_bits invalid n %u s %u\n", n, s); in __split_next_bits()
86 ret = next_bits(p, &tmp, n - s); in __split_next_bits()
96 static int next_bits(struct sw842_param *p, u64 *d, u8 n) in next_bits() argument
98 u8 *in = p->in, b = p->bit, bits = b + n; in next_bits()
100 if (n > 64) { in next_bits()
101 pr_debug("next_bits invalid n %u\n", n); in next_bits()
109 return __split_next_bits(p, d, n, 32); in next_bits()
[all …]
842_compress.c
131 #define find_index(p, b, n) ({ \ argument
133 p->index##b[n] = INDEX_NOT_FOUND; \
134 hash_for_each_possible(p->htable##b, _n, node, p->data##b[n]) { \
135 if (p->data##b[n] == _n->data) { \
136 p->index##b[n] = _n->index; \
140 p->index##b[n] >= 0; \
143 #define check_index(p, b, n) \ argument
144 ((p)->index##b[n] == INDEX_NOT_CHECKED \
145 ? find_index(p, b, n) \
146 : (p)->index##b[n] >= 0)
[all …]
/lib/raid6/
unroll.awk
9 n = N + 0
12 if (/\$\$/) { rep = n } else { rep = 1 }
