
Searched refs:n (results 1 – 25 of 318), sorted by relevance


/fs/befs/
endian.h
16 fs64_to_cpu(const struct super_block *sb, fs64 n) in fs64_to_cpu() argument
19 return le64_to_cpu((__force __le64)n); in fs64_to_cpu()
21 return be64_to_cpu((__force __be64)n); in fs64_to_cpu()
25 cpu_to_fs64(const struct super_block *sb, u64 n) in cpu_to_fs64() argument
28 return (__force fs64)cpu_to_le64(n); in cpu_to_fs64()
30 return (__force fs64)cpu_to_be64(n); in cpu_to_fs64()
34 fs32_to_cpu(const struct super_block *sb, fs32 n) in fs32_to_cpu() argument
37 return le32_to_cpu((__force __le32)n); in fs32_to_cpu()
39 return be32_to_cpu((__force __be32)n); in fs32_to_cpu()
43 cpu_to_fs32(const struct super_block *sb, u32 n) in cpu_to_fs32() argument
[all …]
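
The befs helpers above (and the very similar ufs, sysv and qnx6 ones further down) pick little- or big-endian conversion at run time from a per-superblock byte-order flag. A minimal user-space sketch of the same pattern, assuming a made-up struct sb with a little_endian flag in place of the kernel's superblock info:

#include <stdint.h>
#include <stdio.h>
#include <endian.h>   /* le64toh/be64toh/htole64/htobe64 (glibc) */

struct sb { int little_endian; };   /* hypothetical stand-in for the fs superblock info */

/* Decode an on-disk 64-bit value according to the filesystem's byte order. */
static uint64_t fs64_to_cpu(const struct sb *sb, uint64_t n)
{
    return sb->little_endian ? le64toh(n) : be64toh(n);
}

/* Encode a CPU-order 64-bit value for writing back to disk. */
static uint64_t cpu_to_fs64(const struct sb *sb, uint64_t n)
{
    return sb->little_endian ? htole64(n) : htobe64(n);
}

int main(void)
{
    struct sb le = { 1 }, be = { 0 };
    uint64_t v = 0x1122334455667788ULL;

    /* Round-tripping through either byte order must give the value back. */
    printf("%llx %llx\n",
           (unsigned long long)fs64_to_cpu(&le, cpu_to_fs64(&le, v)),
           (unsigned long long)fs64_to_cpu(&be, cpu_to_fs64(&be, v)));
    return 0;
}
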
/fs/ufs/
swab.h
27 fs64_to_cpu(struct super_block *sbp, __fs64 n) in fs64_to_cpu() argument
30 return le64_to_cpu((__force __le64)n); in fs64_to_cpu()
32 return be64_to_cpu((__force __be64)n); in fs64_to_cpu()
36 cpu_to_fs64(struct super_block *sbp, u64 n) in cpu_to_fs64() argument
39 return (__force __fs64)cpu_to_le64(n); in cpu_to_fs64()
41 return (__force __fs64)cpu_to_be64(n); in cpu_to_fs64()
45 fs32_to_cpu(struct super_block *sbp, __fs32 n) in fs32_to_cpu() argument
48 return le32_to_cpu((__force __le32)n); in fs32_to_cpu()
50 return be32_to_cpu((__force __be32)n); in fs32_to_cpu()
54 cpu_to_fs32(struct super_block *sbp, u32 n) in cpu_to_fs32() argument
[all …]
/fs/ubifs/
tnc.c
181 struct ubifs_old_idx *old_idx, *n; in destroy_old_idx() local
183 rbtree_postorder_for_each_entry_safe(old_idx, n, &c->old_idx, rb) in destroy_old_idx()
243 const int n = new_zn->child_cnt; in replace_znode() local
246 for (i = 0; i < n; i++) { in replace_znode()
618 struct ubifs_znode *znode, int n) in get_znode() argument
622 zbr = &znode->zbranch[n]; in get_znode()
626 znode = ubifs_load_znode(c, zbr, znode, n); in get_znode()
639 static int tnc_next(struct ubifs_info *c, struct ubifs_znode **zn, int *n) in tnc_next() argument
642 int nn = *n; in tnc_next()
646 *n = nn; in tnc_next()
[all …]
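
destroy_old_idx() above tears down the old-index tree with rbtree_postorder_for_each_entry_safe(), i.e. children are visited (and may be freed) before their parent, so the walk never follows a pointer out of an already-freed node. A user-space sketch of the same post-order teardown on a plain binary tree (the kernel macro additionally does this iteratively, without recursion):

#include <stdlib.h>

struct node {
    int key;
    struct node *left, *right;
};

static struct node *new_node(int key)
{
    struct node *n = calloc(1, sizeof(*n));

    if (n)
        n->key = key;
    return n;
}

/* Post-order: free both children before the parent, so no freed node
 * ever has to be dereferenced again while the walk continues. */
static void destroy_tree(struct node *n)
{
    if (!n)
        return;
    destroy_tree(n->left);
    destroy_tree(n->right);
    free(n);
}

int main(void)
{
    struct node *root = new_node(2);

    if (root) {
        root->left = new_node(1);
        root->right = new_node(3);
    }
    destroy_tree(root);
    return 0;
}
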
/fs/fscache/
histogram.c
26 unsigned n[5], t; in fscache_histogram_show() local
37 n[0] = atomic_read(&fscache_obj_instantiate_histogram[index]); in fscache_histogram_show()
38 n[1] = atomic_read(&fscache_ops_histogram[index]); in fscache_histogram_show()
39 n[2] = atomic_read(&fscache_objs_histogram[index]); in fscache_histogram_show()
40 n[3] = atomic_read(&fscache_retrieval_delay_histogram[index]); in fscache_histogram_show()
41 n[4] = atomic_read(&fscache_retrieval_histogram[index]); in fscache_histogram_show()
42 if (!(n[0] | n[1] | n[2] | n[3] | n[4])) in fscache_histogram_show()
48 index, t, n[0], n[1], n[2], n[3], n[4]); in fscache_histogram_show()
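
fscache_histogram_show() above reads five per-bucket counters and skips buckets where all of them are zero, so only non-empty rows are printed. A small user-space sketch of that filtering, reduced to two counters with made-up sample data:

#include <stdio.h>

#define BUCKETS 8

int main(void)
{
    /* Hypothetical sample data standing in for the kernel's atomic counters. */
    unsigned int obj_inst[BUCKETS] = { 0, 3, 0, 0, 7, 0, 0, 1 };
    unsigned int ops[BUCKETS]      = { 0, 1, 0, 0, 0, 0, 0, 0 };

    for (unsigned int index = 0; index < BUCKETS; index++) {
        unsigned int n[2];

        n[0] = obj_inst[index];
        n[1] = ops[index];
        if (!(n[0] | n[1]))      /* nothing recorded in this bucket: skip the row */
            continue;
        printf("%4u %9u %9u\n", index, n[0], n[1]);
    }
    return 0;
}
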
/fs/sysv/
sysv.h
188 static inline __u32 fs32_to_cpu(struct sysv_sb_info *sbi, __fs32 n) in fs32_to_cpu() argument
191 return PDP_swab((__force __u32)n); in fs32_to_cpu()
193 return le32_to_cpu((__force __le32)n); in fs32_to_cpu()
195 return be32_to_cpu((__force __be32)n); in fs32_to_cpu()
198 static inline __fs32 cpu_to_fs32(struct sysv_sb_info *sbi, __u32 n) in cpu_to_fs32() argument
201 return (__force __fs32)PDP_swab(n); in cpu_to_fs32()
203 return (__force __fs32)cpu_to_le32(n); in cpu_to_fs32()
205 return (__force __fs32)cpu_to_be32(n); in cpu_to_fs32()
208 static inline __fs32 fs32_add(struct sysv_sb_info *sbi, __fs32 *n, int d) in fs32_add() argument
211 *(__u32*)n = PDP_swab(PDP_swab(*(__u32*)n)+d); in fs32_add()
[all …]
itree.c
30 int n = 0; in block_to_path() local
35 offsets[n++] = block; in block_to_path()
37 offsets[n++] = DIRECT; in block_to_path()
38 offsets[n++] = block; in block_to_path()
40 offsets[n++] = DIRECT+1; in block_to_path()
41 offsets[n++] = block >> ptrs_bits; in block_to_path()
42 offsets[n++] = block & (indirect_blocks - 1); in block_to_path()
44 offsets[n++] = DIRECT+2; in block_to_path()
45 offsets[n++] = block >> (ptrs_bits * 2); in block_to_path()
46 offsets[n++] = (block >> ptrs_bits) & (indirect_blocks - 1); in block_to_path()
[all …]
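
block_to_path() above, like the minix and ext4 variants further down, turns a file-relative block number into a chain of array offsets: a direct slot, or indices into the single, double or triple indirect blocks. A user-space sketch with hypothetical geometry (10 direct slots and 256 pointers per indirect block, i.e. ptrs_bits = 8):

#include <stdio.h>

#define DIRECT     10           /* hypothetical number of direct block slots   */
#define PTRS_BITS  8            /* 2^8 = 256 block pointers per indirect block */
#define PTRS       (1u << PTRS_BITS)

/* Fill offsets[] with the path from the inode to the given file block;
 * returns the depth (0 means the block number is out of range). */
static int block_to_path(unsigned long block, unsigned int offsets[4])
{
    int n = 0;

    if (block < DIRECT) {
        offsets[n++] = block;
    } else if ((block -= DIRECT) < PTRS) {
        offsets[n++] = DIRECT;                  /* slot of the indirect block   */
        offsets[n++] = block;
    } else if ((block -= PTRS) < PTRS * PTRS) {
        offsets[n++] = DIRECT + 1;              /* slot of the double indirect  */
        offsets[n++] = block >> PTRS_BITS;
        offsets[n++] = block & (PTRS - 1);
    } else if ((block -= PTRS * PTRS) < (unsigned long)PTRS * PTRS * PTRS) {
        offsets[n++] = DIRECT + 2;              /* slot of the triple indirect  */
        offsets[n++] = block >> (PTRS_BITS * 2);
        offsets[n++] = (block >> PTRS_BITS) & (PTRS - 1);
        offsets[n++] = block & (PTRS - 1);
    }
    return n;
}

int main(void)
{
    unsigned int offsets[4];
    int depth = block_to_path(70000, offsets);

    printf("depth %d:", depth);
    for (int i = 0; i < depth; i++)
        printf(" %u", offsets[i]);
    printf("\n");
    return 0;
}
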
/fs/minix/
itree_v2.c
9 static inline unsigned long block_to_cpu(block_t n) in block_to_cpu() argument
11 return n; in block_to_cpu()
14 static inline block_t cpu_to_block(unsigned long n) in cpu_to_block() argument
16 return n; in cpu_to_block()
29 int n = 0; in block_to_path() local
41 offsets[n++] = block; in block_to_path()
43 offsets[n++] = DIRCOUNT; in block_to_path()
44 offsets[n++] = block; in block_to_path()
46 offsets[n++] = DIRCOUNT + 1; in block_to_path()
47 offsets[n++] = block / INDIRCOUNT(sb); in block_to_path()
[all …]
itree_v1.c
10 static inline unsigned long block_to_cpu(block_t n) in block_to_cpu() argument
12 return n; in block_to_cpu()
15 static inline block_t cpu_to_block(unsigned long n) in cpu_to_block() argument
17 return n; in cpu_to_block()
27 int n = 0; in block_to_path() local
38 offsets[n++] = block; in block_to_path()
40 offsets[n++] = 7; in block_to_path()
41 offsets[n++] = block; in block_to_path()
44 offsets[n++] = 8; in block_to_path()
45 offsets[n++] = block>>9; in block_to_path()
[all …]
itree_common.c
75 int n = 0; in alloc_branch() local
81 if (parent) for (n = 1; n < num; n++) { in alloc_branch()
87 branch[n].key = cpu_to_block(nr); in alloc_branch()
96 branch[n].bh = bh; in alloc_branch()
97 branch[n].p = (block_t*) bh->b_data + offsets[n]; in alloc_branch()
98 *branch[n].p = branch[n].key; in alloc_branch()
104 if (n == num) in alloc_branch()
108 for (i = 1; i < n; i++) in alloc_branch()
110 for (i = 0; i < n; i++) in alloc_branch()
308 int n; in truncate() local
[all …]
/fs/qnx6/
qnx6.h
78 static inline __u64 fs64_to_cpu(struct qnx6_sb_info *sbi, __fs64 n) in fs64_to_cpu() argument
81 return le64_to_cpu((__force __le64)n); in fs64_to_cpu()
83 return be64_to_cpu((__force __be64)n); in fs64_to_cpu()
86 static inline __fs64 cpu_to_fs64(struct qnx6_sb_info *sbi, __u64 n) in cpu_to_fs64() argument
89 return (__force __fs64)cpu_to_le64(n); in cpu_to_fs64()
91 return (__force __fs64)cpu_to_be64(n); in cpu_to_fs64()
94 static inline __u32 fs32_to_cpu(struct qnx6_sb_info *sbi, __fs32 n) in fs32_to_cpu() argument
97 return le32_to_cpu((__force __le32)n); in fs32_to_cpu()
99 return be32_to_cpu((__force __be32)n); in fs32_to_cpu()
102 static inline __fs32 cpu_to_fs32(struct qnx6_sb_info *sbi, __u32 n) in cpu_to_fs32() argument
[all …]
dir.c
27 static struct page *qnx6_get_page(struct inode *dir, unsigned long n) in qnx6_get_page() argument
30 struct page *page = read_mapping_page(mapping, n, NULL); in qnx6_get_page()
51 u32 n = s >> (PAGE_SHIFT - sb->s_blocksize_bits); /* in pages */ in qnx6_longname() local
55 struct page *page = read_mapping_page(mapping, n, NULL); in qnx6_longname()
119 unsigned long n = pos >> PAGE_SHIFT; in qnx6_readdir() local
127 for ( ; !done && n < npages; n++, start = 0) { in qnx6_readdir()
128 struct page *page = qnx6_get_page(inode, n); in qnx6_readdir()
129 int limit = last_entry(inode, n); in qnx6_readdir()
135 ctx->pos = (n + 1) << PAGE_SHIFT; in qnx6_readdir()
219 unsigned long start, n; in qnx6_find_entry() local
[all …]
/fs/ceph/
mdsmap.c
21 int n = 0; in __mdsmap_get_random_mds() local
27 n++; in __mdsmap_get_random_mds()
28 if (n == 0) in __mdsmap_get_random_mds()
32 n = prandom_u32() % n; in __mdsmap_get_random_mds()
36 if (j > n) in __mdsmap_get_random_mds()
66 u32 n; \
68 ceph_decode_32_safe(p, end, n, bad); \
69 need = sizeof(type) * n; \
76 u32 n; \
78 ceph_decode_32_safe(p, end, n, bad); \
[all …]
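
__mdsmap_get_random_mds() above makes two passes: it counts the usable entries, bails out if there are none, draws a random index with prandom_u32() % n, and walks to that entry. A user-space sketch of the same two-pass random pick, with rand() standing in for prandom_u32() and a made-up "usable" predicate (state >= 0):

#include <stdio.h>
#include <stdlib.h>
#include <time.h>

/* Pick a random index among entries whose state is >= 0; return -1 if none. */
static int pick_random_usable(const int *state, int count)
{
    int n = 0, i, j;

    for (i = 0; i < count; i++)          /* first pass: count usable entries  */
        if (state[i] >= 0)
            n++;
    if (n == 0)
        return -1;

    n = rand() % n;                      /* which usable entry to take        */
    for (i = 0, j = 0; i < count; i++) { /* second pass: walk to the n-th one */
        if (state[i] < 0)
            continue;
        if (j++ == n)
            return i;
    }
    return -1;                           /* not reached */
}

int main(void)
{
    int state[] = { -1, 0, -1, 2, 1 };   /* hypothetical per-entry states */

    srand((unsigned)time(NULL));
    printf("picked index %d\n", pick_random_usable(state, 5));
    return 0;
}
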
/fs/jfs/
jfs_unicode.h
43 size_t n) in UniStrncpy_le() argument
47 while (n-- && *ucs2) /* Copy the strings */ in UniStrncpy_le()
50 n++; in UniStrncpy_le()
51 while (n--) /* Pad with nulls */ in UniStrncpy_le()
60 size_t n) in UniStrncmp_le() argument
62 if (!n) in UniStrncmp_le()
64 while ((*ucs1 == __le16_to_cpu(*ucs2)) && *ucs1 && --n) { in UniStrncmp_le()
75 size_t n) in UniStrncpy_to_le() argument
79 while (n-- && *ucs2) /* Copy the strings */ in UniStrncpy_to_le()
82 n++; in UniStrncpy_to_le()
[all …]
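
The jfs helpers above mirror strncpy()/strncmp() for 16-bit characters, with the on-disk operand stored little-endian. A user-space sketch of the comparison half, using glibc's le16toh() in place of the kernel's __le16_to_cpu() (the uni16 typedef is only for the example):

#include <stdint.h>
#include <stdio.h>
#include <endian.h>   /* le16toh/htole16 (glibc) */

typedef uint16_t uni16;

/* Compare at most n 16-bit characters; ucs2 is stored little-endian. */
static int uni_strncmp_le(const uni16 *ucs1, const uni16 *ucs2, size_t n)
{
    if (!n)
        return 0;                               /* zero-length compare matches */
    while ((*ucs1 == le16toh(*ucs2)) && *ucs1 && --n) {
        ucs1++;
        ucs2++;
    }
    return (int)*ucs1 - (int)le16toh(*ucs2);
}

int main(void)
{
    uni16 a[] = { 'a', 'b', 'c', 0 };
    uni16 b[] = { htole16('a'), htole16('b'), htole16('d'), 0 };

    printf("%d\n", uni_strncmp_le(a, b, 4));    /* negative: 'c' < 'd' */
    return 0;
}
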
/fs/
select.c
413 #define FDS_IN(fds, n) (fds->in + n) argument
414 #define FDS_OUT(fds, n) (fds->out + n) argument
415 #define FDS_EX(fds, n) (fds->ex + n) argument
417 #define BITS(fds, n) (*FDS_IN(fds, n)|*FDS_OUT(fds, n)|*FDS_EX(fds, n)) argument
419 static int max_select_fd(unsigned long n, fd_set_bits *fds) in max_select_fd() argument
427 set = ~(~0UL << (n & (BITS_PER_LONG-1))); in max_select_fd()
428 n /= BITS_PER_LONG; in max_select_fd()
430 open_fds = fdt->open_fds + n; in max_select_fd()
433 set &= BITS(fds, n); in max_select_fd()
440 while (n) { in max_select_fd()
[all …]
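
max_select_fd() above masks off bits at and above n in the last word of the fd bitmaps, then walks the words backwards for the highest bit that is both requested and open. A simplified user-space sketch that finds the highest set bit below n in an array of unsigned long words (without the open-fd cross-check):

#include <stdio.h>

#define BITS_PER_LONG  (8 * (int)sizeof(unsigned long))

/* Return 1 + index of the highest bit set below n, or 0 if none is set. */
static int max_set_bit(unsigned long n, const unsigned long *bits)
{
    unsigned long set;
    int words = (int)(n / BITS_PER_LONG);

    /* Mask covering only the bits below n in the last (partial) word. */
    set = ~(~0UL << (n % BITS_PER_LONG));
    if (set) {
        set &= bits[words];
        if (set)
            goto found;
    }
    while (words > 0) {
        set = bits[--words];
        if (set)
            goto found;
    }
    return 0;
found:
    n = (unsigned long)words * BITS_PER_LONG;
    while (set) {                 /* highest set bit within this word */
        set >>= 1;
        n++;
    }
    return (int)n;
}

int main(void)
{
    unsigned long bits[2] = { 0x9UL, 0x5UL };

    /* On 64-bit longs the highest set bit below 70 is bit 66, so expect 67. */
    printf("%d\n", max_set_bit(70, bits));
    return 0;
}
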
char_dev.c
204 dev_t n, next; in register_chrdev_region() local
206 for (n = from; n < to; n = next) { in register_chrdev_region()
207 next = MKDEV(MAJOR(n)+1, 0); in register_chrdev_region()
210 cd = __register_chrdev_region(MAJOR(n), MINOR(n), in register_chrdev_region()
211 next - n, name); in register_chrdev_region()
217 to = n; in register_chrdev_region()
218 for (n = from; n < to; n = next) { in register_chrdev_region()
219 next = MKDEV(MAJOR(n)+1, 0); in register_chrdev_region()
220 kfree(__unregister_chrdev_region(MAJOR(n), MINOR(n), next - n)); in register_chrdev_region()
314 dev_t n, next; in unregister_chrdev_region() local
[all …]
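
register_chrdev_region() above splits an arbitrary device-number range into per-major slices, registers them one at a time, and on failure unwinds the slices it already claimed. A user-space sketch of that split-and-rollback loop with local MKDEV/MAJOR/MINOR macros (20-bit minor field, as in the kernel) and a fake claim step that deliberately fails on major 6:

#include <stdio.h>

typedef unsigned int dev_t_;
#define MINORBITS      20
#define MINORMASK      ((1u << MINORBITS) - 1)
#define MAJOR_(d)      ((d) >> MINORBITS)
#define MINOR_(d)      ((d) & MINORMASK)
#define MKDEV_(ma, mi) (((ma) << MINORBITS) | (mi))

/* Fake per-slice registration: pretend major 6 is already taken. */
static int claim(unsigned int major, unsigned int baseminor, unsigned int count)
{
    printf("claim   major %u minor %u count %u\n", major, baseminor, count);
    return major == 6 ? -1 : 0;
}

static void release(unsigned int major, unsigned int baseminor, unsigned int count)
{
    printf("release major %u minor %u count %u\n", major, baseminor, count);
}

static int register_range(dev_t_ from, unsigned int count)
{
    dev_t_ to = from + count, n, next;

    for (n = from; n < to; n = next) {
        next = MKDEV_(MAJOR_(n) + 1, 0);          /* start of the next major */
        if (next > to)
            next = to;
        if (claim(MAJOR_(n), MINOR_(n), next - n) < 0)
            goto fail;
    }
    return 0;
fail:
    to = n;                                       /* unwind what we claimed   */
    for (n = from; n < to; n = next) {
        next = MKDEV_(MAJOR_(n) + 1, 0);
        release(MAJOR_(n), MINOR_(n), next - n);
    }
    return -1;
}

int main(void)
{
    /* Range starting late in major 5 and spilling into major 6. */
    return register_range(MKDEV_(5, MINORMASK - 2), 8) ? 1 : 0;
}
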
/fs/xfs/libxfs/
xfs_bit.h
16 static inline uint64_t xfs_mask64hi(int n) in xfs_mask64hi() argument
18 return (uint64_t)-1 << (64 - (n)); in xfs_mask64hi()
20 static inline uint32_t xfs_mask32lo(int n) in xfs_mask32lo() argument
22 return ((uint32_t)1 << (n)) - 1; in xfs_mask32lo()
24 static inline uint64_t xfs_mask64lo(int n) in xfs_mask64lo() argument
26 return ((uint64_t)1 << (n)) - 1; in xfs_mask64lo()
51 int n = 0; in xfs_lowbit64() local
54 n = ffs(w); in xfs_lowbit64()
58 n = ffs(w); in xfs_lowbit64()
59 if (n) in xfs_lowbit64()
[all …]
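
xfs_mask64hi()/xfs_mask64lo() above build masks of the top or bottom n bits, and xfs_lowbit64() finds the lowest set bit by running ffs() on each 32-bit half. A user-space sketch of the same helpers (with the usual caveat that shifting by the full word width is undefined in C, so n = 0 for the hi mask and n = 64 for the lo mask are out of range here just as in the originals):

#include <stdint.h>
#include <stdio.h>
#include <strings.h>   /* ffs() */

static inline uint64_t mask64hi(int n)  /* top n bits set,  1 <= n <= 64 */
{
    return (uint64_t)-1 << (64 - n);
}

static inline uint64_t mask64lo(int n)  /* low n bits set,  0 <= n <= 63 */
{
    return ((uint64_t)1 << n) - 1;
}

/* Index of the lowest set bit of v, or -1 if v is zero. */
static int lowbit64(uint64_t v)
{
    uint32_t w = (uint32_t)v;
    int n = 0;

    if (w)                        /* lower half has a bit set */
        n = ffs((int)w);
    else if (v >> 32)             /* only the upper half does */
        n = ffs((int)(v >> 32)) + 32;
    return n - 1;
}

int main(void)
{
    printf("%016llx %016llx %d\n",
           (unsigned long long)mask64hi(4),
           (unsigned long long)mask64lo(12),
           lowbit64(0x0000010000000000ULL));    /* expect bit index 40 */
    return 0;
}
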
/fs/ext4/
block_validity.c
57 struct ext4_system_zone *entry, *n; in release_system_zone() local
59 rbtree_postorder_for_each_entry_safe(entry, n, in release_system_zone()
74 struct rb_node **n = &system_blks->root.rb_node, *node; in add_system_zone() local
77 while (*n) { in add_system_zone()
78 parent = *n; in add_system_zone()
81 n = &(*n)->rb_left; in add_system_zone()
83 n = &(*n)->rb_right; in add_system_zone()
97 rb_link_node(new_node, parent, n); in add_system_zone()
155 int err = 0, n; in ext4_protect_reserved_inode() local
168 n = ext4_map_blocks(NULL, inode, &map, 0); in ext4_protect_reserved_inode()
[all …]
indirect.c
83 int n = 0; in ext4_block_to_path() local
87 offsets[n++] = i_block; in ext4_block_to_path()
90 offsets[n++] = EXT4_IND_BLOCK; in ext4_block_to_path()
91 offsets[n++] = i_block; in ext4_block_to_path()
94 offsets[n++] = EXT4_DIND_BLOCK; in ext4_block_to_path()
95 offsets[n++] = i_block >> ptrs_bits; in ext4_block_to_path()
96 offsets[n++] = i_block & (ptrs - 1); in ext4_block_to_path()
99 offsets[n++] = EXT4_TIND_BLOCK; in ext4_block_to_path()
100 offsets[n++] = i_block >> (ptrs_bits * 2); in ext4_block_to_path()
101 offsets[n++] = (i_block >> ptrs_bits) & (ptrs - 1); in ext4_block_to_path()
[all …]
/fs/hpfs/
alloc.c
115 static secno alloc_in_bmp(struct super_block *s, secno near, unsigned n, unsigned forward) in alloc_in_bmp() argument
120 unsigned nr = (near & 0x3fff) & ~(n - 1); in alloc_in_bmp()
125 if (n != 1 && n != 4) { in alloc_in_bmp()
126 hpfs_error(s, "Bad allocation size: %d", n); in alloc_in_bmp()
134 if (!tstbits(bmp, nr, n + forward)) { in alloc_in_bmp()
138 q = nr + n; b = 0; in alloc_in_bmp()
139 while ((a = tstbits(bmp, q, n + forward)) != 0) { in alloc_in_bmp()
141 if (n != 1) q = ((q-1)&~(n-1))+n; in alloc_in_bmp()
158 if (n + forward >= 0x3f && le32_to_cpu(bmp[i]) != 0xffffffff) goto cont; in alloc_in_bmp()
166 if (n != 1) q = ((q-1)&~(n-1))+n; in alloc_in_bmp()
[all …]
/fs/squashfs/
file_direct.c
36 int i, n, pages, missing_pages, bytes, res = -ENOMEM; in squashfs_readpage_block() local
59 for (missing_pages = 0, i = 0, n = start_index; i < pages; i++, n++) { in squashfs_readpage_block()
60 page[i] = (n == target_page->index) ? target_page : in squashfs_readpage_block()
61 grab_cache_page_nowait(target_page->mapping, n); in squashfs_readpage_block()
150 int res = buffer->error, n, offset = 0; in squashfs_read_cache() local
158 for (n = 0; n < pages && bytes > 0; n++, in squashfs_read_cache()
162 if (page[n] == NULL) in squashfs_read_cache()
165 squashfs_fill_page(page[n], buffer, offset, avail); in squashfs_read_cache()
166 unlock_page(page[n]); in squashfs_read_cache()
167 if (page[n] != target_page) in squashfs_read_cache()
[all …]
/fs/btrfs/
ref-verify.c
100 struct rb_node *n; in lookup_block_entry() local
103 n = root->rb_node; in lookup_block_entry()
104 while (n) { in lookup_block_entry()
105 entry = rb_entry(n, struct block_entry, node); in lookup_block_entry()
107 n = n->rb_right; in lookup_block_entry()
109 n = n->rb_left; in lookup_block_entry()
189 struct rb_node *n; in lookup_root_entry() local
192 n = root->rb_node; in lookup_root_entry()
193 while (n) { in lookup_root_entry()
194 entry = rb_entry(n, struct root_entry, node); in lookup_root_entry()
[all …]
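
lookup_block_entry() and lookup_root_entry() above are the standard rbtree search walk: start at root->rb_node, compare the key, follow rb_left or rb_right until a match or NULL. A user-space sketch of the same walk on a plain (unbalanced) binary search tree, since the comparison loop looks the same whatever the balancing scheme:

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

struct block_entry {
    uint64_t bytenr;
    struct block_entry *left, *right;
};

/* Return the entry with the given bytenr, or NULL if it is not in the tree. */
static struct block_entry *lookup(struct block_entry *n, uint64_t bytenr)
{
    while (n) {
        if (bytenr < n->bytenr)
            n = n->left;
        else if (bytenr > n->bytenr)
            n = n->right;
        else
            return n;
    }
    return NULL;
}

int main(void)
{
    struct block_entry a = { 100, NULL, NULL };
    struct block_entry c = { 300, NULL, NULL };
    struct block_entry b = { 200, &a, &c };     /* b is the root */

    printf("%p %p\n", (void *)lookup(&b, 300), (void *)lookup(&b, 150));
    return 0;
}
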
/fs/cifs/
cifs_unicode.h
210 UniStrncat(wchar_t *ucs1, const wchar_t *ucs2, size_t n) in UniStrncat() argument
216 while (n-- && (*ucs1 = *ucs2)) { /* copy s2 after s1 */ in UniStrncat()
228 UniStrncmp(const wchar_t *ucs1, const wchar_t *ucs2, size_t n) in UniStrncmp() argument
230 if (!n) in UniStrncmp()
232 while ((*ucs1 == *ucs2) && *ucs1 && --n) { in UniStrncmp()
243 UniStrncmp_le(const wchar_t *ucs1, const wchar_t *ucs2, size_t n) in UniStrncmp_le() argument
245 if (!n) in UniStrncmp_le()
247 while ((*ucs1 == __le16_to_cpu(*ucs2)) && *ucs1 && --n) { in UniStrncmp_le()
258 UniStrncpy(wchar_t *ucs1, const wchar_t *ucs2, size_t n) in UniStrncpy() argument
262 while (n-- && *ucs2) /* Copy the strings */ in UniStrncpy()
[all …]
/fs/nls/
nls_euc-jp.c
353 int n; in euc2sjisibm() local
366 if ((n = euc2sjisibm_g3upper(sjis, euc_hi, euc_lo))) { in euc2sjisibm()
367 return n; in euc2sjisibm()
368 } else if ((n = euc2sjisibm_jisx0212(sjis, euc_hi, euc_lo))) { in euc2sjisibm()
369 return n; in euc2sjisibm()
412 int n; in uni2char() local
416 if ((n = p_nls->uni2char(uni, out, boundlen)) < 0) in uni2char()
417 return n; in uni2char()
420 if (n == 1) { in uni2char()
430 } else if (n == 2) { in uni2char()
[all …]
/fs/hfs/
bitmap.c
32 u32 mask, start, len, n; in hfs_find_set_zero_bits() local
46 n = be32_to_cpu(val); in hfs_find_set_zero_bits()
50 if (!(n & mask)) in hfs_find_set_zero_bits()
59 n = be32_to_cpu(val); in hfs_find_set_zero_bits()
62 if (!(n & mask)) in hfs_find_set_zero_bits()
76 n |= mask; in hfs_find_set_zero_bits()
80 if (!--len || n & mask) in hfs_find_set_zero_bits()
85 *curr++ = cpu_to_be32(n); in hfs_find_set_zero_bits()
88 n = be32_to_cpu(*curr); in hfs_find_set_zero_bits()
91 if (n) { in hfs_find_set_zero_bits()
[all …]
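
hfs_find_set_zero_bits() above scans the allocation bitmap (stored as big-endian 32-bit words) for the first clear bit, then keeps setting bits until it hits an already-set bit or reaches the requested length. A simplified user-space sketch on host-order words, leaving out the byte swapping and the offset/wrap handling of the original:

#include <stdio.h>
#include <stdint.h>

/* Find the first clear bit in the bitmap, set up to max_len bits starting
 * there, and return the start index; *found gets the run length actually set.
 * Returns (uint32_t)-1 if the bitmap is completely full. */
static uint32_t alloc_bits(uint32_t *bitmap, uint32_t words,
                           uint32_t max_len, uint32_t *found)
{
    uint32_t i, start = 0, len = 0;

    *found = 0;
    for (i = 0; i < words; i++) {
        uint32_t n = bitmap[i];
        uint32_t mask;

        if (n == 0xffffffffu)                     /* word fully allocated */
            continue;
        start = i * 32;
        for (mask = 0x80000000u; mask; mask >>= 1, start++)
            if (!(n & mask))
                goto found_start;
    }
    return (uint32_t)-1;

found_start:
    i = start;
    while (len < max_len) {
        uint32_t mask = 0x80000000u >> (i & 31);

        if (bitmap[i >> 5] & mask)                /* run ends at a set bit */
            break;
        bitmap[i >> 5] |= mask;                   /* claim this bit        */
        len++;
        i++;
        if (i >= words * 32)
            break;
    }
    *found = len;
    return start;
}

int main(void)
{
    uint32_t bitmap[2] = { 0xffffffffu, 0xfc000000u };  /* bits 0..37 already set */
    uint32_t found;
    uint32_t start = alloc_bits(bitmap, 2, 4, &found);

    printf("start %u len %u\n", start, found);          /* expect start 38 len 4 */
    return 0;
}
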
/fs/reiserfs/
xattr_acl.c
68 int n, count; in reiserfs_posix_acl_from_disk() local
87 for (n = 0; n < count; n++) { in reiserfs_posix_acl_from_disk()
91 acl->a_entries[n].e_tag = le16_to_cpu(entry->e_tag); in reiserfs_posix_acl_from_disk()
92 acl->a_entries[n].e_perm = le16_to_cpu(entry->e_perm); in reiserfs_posix_acl_from_disk()
93 switch (acl->a_entries[n].e_tag) { in reiserfs_posix_acl_from_disk()
106 acl->a_entries[n].e_uid = in reiserfs_posix_acl_from_disk()
114 acl->a_entries[n].e_gid = in reiserfs_posix_acl_from_disk()
139 int n; in reiserfs_posix_acl_to_disk() local
150 for (n = 0; n < acl->a_count; n++) { in reiserfs_posix_acl_to_disk()
151 const struct posix_acl_entry *acl_e = &acl->a_entries[n]; in reiserfs_posix_acl_to_disk()
[all …]
