Searched refs:oe (Results 1 – 13 of 13) sorted by relevance

/fs/overlayfs/
ovl_entry.h
128 static inline unsigned int ovl_numlower(struct ovl_entry *oe) in ovl_numlower() argument
130 return oe ? oe->__numlower : 0; in ovl_numlower()
133 static inline struct ovl_path *ovl_lowerstack(struct ovl_entry *oe) in ovl_lowerstack() argument
135 return ovl_numlower(oe) ? oe->__lowerstack : NULL; in ovl_lowerstack()
138 static inline struct ovl_path *ovl_lowerpath(struct ovl_entry *oe) in ovl_lowerpath() argument
140 return ovl_lowerstack(oe); in ovl_lowerpath()
143 static inline struct ovl_path *ovl_lowerdata(struct ovl_entry *oe) in ovl_lowerdata() argument
145 struct ovl_path *lowerstack = ovl_lowerstack(oe); in ovl_lowerdata()
147 return lowerstack ? &lowerstack[oe->__numlower - 1] : NULL; in ovl_lowerdata()
151 static inline struct dentry *ovl_lowerdata_dentry(struct ovl_entry *oe) in ovl_lowerdata_dentry() argument
[all …]
util.c
127 struct ovl_entry *oe = kzalloc(size, GFP_KERNEL); in ovl_alloc_entry() local
129 if (oe) in ovl_alloc_entry()
130 oe->__numlower = numlower; in ovl_alloc_entry()
132 return oe; in ovl_alloc_entry()
135 void ovl_free_entry(struct ovl_entry *oe) in ovl_free_entry() argument
137 ovl_stack_put(ovl_lowerstack(oe), ovl_numlower(oe)); in ovl_free_entry()
138 kfree(oe); in ovl_free_entry()
159 struct ovl_entry *oe) in ovl_dentry_init_reval() argument
161 return ovl_dentry_init_flags(dentry, upperdentry, oe, OVL_D_REVALIDATE); in ovl_dentry_init_reval()
165 struct ovl_entry *oe, unsigned int mask) in ovl_dentry_init_flags() argument
[all …]
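The overlayfs hits here and below (export.c, super.c, namei.c, inode.c) all revolve around struct ovl_entry: a header carrying __numlower followed by a flexible array of struct ovl_path, allocated as one block by ovl_alloc_entry() and read through NULL-tolerant accessors. Below is a minimal userspace sketch of that pattern, assuming simplified stand-ins (struct lower_path for struct ovl_path, entry_alloc() and calloc() for ovl_alloc_entry() and kzalloc(..., GFP_KERNEL)); it is an illustration, not the kernel code.

#include <stdio.h>
#include <stdlib.h>

struct lower_path {
        const char *layer;                      /* stand-in for the dentry/layer pair */
};

struct entry {
        unsigned int __numlower;
        struct lower_path __lowerstack[];       /* __numlower slots follow the header */
};

static unsigned int entry_numlower(struct entry *oe)
{
        return oe ? oe->__numlower : 0;         /* NULL entry means "no lower layers" */
}

static struct lower_path *entry_lowerstack(struct entry *oe)
{
        return entry_numlower(oe) ? oe->__lowerstack : NULL;
}

static struct lower_path *entry_lowerdata(struct entry *oe)
{
        struct lower_path *stack = entry_lowerstack(oe);

        /* the data layer is the last element of the lower stack */
        return stack ? &stack[oe->__numlower - 1] : NULL;
}

static struct entry *entry_alloc(unsigned int numlower)
{
        struct entry *oe = calloc(1, sizeof(*oe) + numlower * sizeof(struct lower_path));

        if (oe)
                oe->__numlower = numlower;
        return oe;
}

int main(void)
{
        struct entry *oe = entry_alloc(2);

        if (!oe)
                return 1;
        oe->__lowerstack[0].layer = "lower-1";
        oe->__lowerstack[1].layer = "lower-2";
        printf("layers=%u data layer=%s\n", entry_numlower(oe), entry_lowerdata(oe)->layer);
        free(oe);
        return 0;
}

Keeping the count and the array in a single allocation lets one free()/kfree() release the whole entry; ovl_free_entry() additionally drops the per-layer references via ovl_stack_put() before the kfree(), which the sketch omits.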
export.c
79 struct ovl_entry *oe = OVL_E(dentry); in ovl_connectable_layer() local
83 return ovl_numlower(oe); in ovl_connectable_layer()
94 return ovl_lowerstack(oe)->layer->idx; in ovl_connectable_layer()
108 struct ovl_entry *oe = OVL_E(dentry); in ovl_connect_layer() local
116 origin_layer = ovl_lowerstack(oe)->layer->idx; in ovl_connect_layer()
303 struct ovl_entry *oe; in ovl_obtain_alias() local
312 oe = ovl_alloc_entry(!!lower); in ovl_obtain_alias()
313 if (!oe) in ovl_obtain_alias()
318 ovl_lowerstack(oe)->dentry = dget(lower); in ovl_obtain_alias()
319 ovl_lowerstack(oe)->layer = lowerpath->layer; in ovl_obtain_alias()
[all …]
super.c
108 struct ovl_entry *oe; in ovl_dentry_revalidate_common() local
119 oe = OVL_I_E(inode); in ovl_dentry_revalidate_common()
120 lowerstack = ovl_lowerstack(oe); in ovl_dentry_revalidate_common()
125 for (i = 0; ret > 0 && i < ovl_numlower(oe); i++) in ovl_dentry_revalidate_common()
162 oi->oe = NULL; in ovl_alloc_inode()
173 kfree(oi->oe); in ovl_free_inode()
183 ovl_stack_put(ovl_lowerstack(oi->oe), ovl_numlower(oi->oe)); in ovl_destroy_inode()
879 struct ovl_entry *oe, const struct path *upperpath) in ovl_get_indexdir() argument
891 ovl_lowerstack(oe)->dentry, true); in ovl_get_indexdir()
1174 struct ovl_entry *oe; in ovl_get_lowerstack() local
[all …]
namei.c
855 struct ovl_entry *oe = OVL_E(dentry); in ovl_path_next() local
856 struct ovl_path *lowerstack = ovl_lowerstack(oe); in ovl_path_next()
862 return ovl_numlower(oe) ? 1 : -1; in ovl_path_next()
865 BUG_ON(idx > ovl_numlower(oe)); in ovl_path_next()
869 return (idx < ovl_numlower(oe)) ? idx + 1 : -1; in ovl_path_next()
1003 struct ovl_entry *oe = NULL; in ovl_lookup() local
1251 oe = ovl_alloc_entry(ctr); in ovl_lookup()
1253 if (!oe) in ovl_lookup()
1256 ovl_stack_cpy(ovl_lowerstack(oe), stack, ctr); in ovl_lookup()
1291 .oe = oe, in ovl_lookup()
[all …]
inode.c
1018 oi->oe = oip->oe; in ovl_inode_init()
1335 struct ovl_path *lowerpath = ovl_lowerpath(oip->oe); in ovl_get_inode()
1379 ovl_free_entry(oip->oe); in ovl_get_inode()
1415 if (((upperdentry && lowerdentry) || ovl_numlower(oip->oe) > 1) || in ovl_get_inode()
overlayfs.h
423 void ovl_free_entry(struct ovl_entry *oe);
427 struct ovl_entry *oe);
429 struct ovl_entry *oe, unsigned int mask);
744 struct ovl_entry *oe; member
/fs/omfs/
file.c
22 struct omfs_extent *oe = (struct omfs_extent *) &bh->b_data[offset]; in omfs_make_empty_table() local
24 oe->e_next = ~cpu_to_be64(0ULL); in omfs_make_empty_table()
25 oe->e_extent_count = cpu_to_be32(1), in omfs_make_empty_table()
26 oe->e_fill = cpu_to_be32(0x22), in omfs_make_empty_table()
27 oe->e_entry[0].e_cluster = ~cpu_to_be64(0ULL); in omfs_make_empty_table()
28 oe->e_entry[0].e_blocks = ~cpu_to_be64(0ULL); in omfs_make_empty_table()
34 struct omfs_extent *oe; in omfs_shrink_inode() local
56 oe = (struct omfs_extent *)(&bh->b_data[OMFS_EXTENT_START]); in omfs_shrink_inode()
64 extent_count = be32_to_cpu(oe->e_extent_count); in omfs_shrink_inode()
70 next = be64_to_cpu(oe->e_next); in omfs_shrink_inode()
[all …]
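The omfs hit shows an on-disk table being built in place: the block buffer is overlaid with struct omfs_extent and every field is stored big-endian via cpu_to_be32()/cpu_to_be64(), with all-ones terminator values. Below is a hedged userspace sketch of that initialization pattern; struct disk_extent_table is a simplified stand-in for struct omfs_extent, and htobe32()/htobe64() from <endian.h> play the role of the kernel helpers.

#include <endian.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct disk_extent_entry {
        uint64_t e_cluster;
        uint64_t e_blocks;
} __attribute__((packed));

struct disk_extent_table {
        uint64_t e_next;                        /* block of the next table, all-ones = none */
        uint32_t e_extent_count;
        uint32_t e_fill;
        struct disk_extent_entry e_entry[1];    /* terminator entry */
} __attribute__((packed));

static void make_empty_table(unsigned char *block, size_t offset)
{
        struct disk_extent_table *oe = (struct disk_extent_table *)&block[offset];

        oe->e_next = ~htobe64(0ULL);            /* all-ones in any byte order */
        oe->e_extent_count = htobe32(1);
        oe->e_fill = htobe32(0x22);
        oe->e_entry[0].e_cluster = ~htobe64(0ULL);
        oe->e_entry[0].e_blocks = ~htobe64(0ULL);
}

int main(void)
{
        unsigned char block[512];

        memset(block, 0, sizeof(block));
        make_empty_table(block, 0);
        printf("extent count on disk: %u\n",
               be32toh(((struct disk_extent_table *)block)->e_extent_count));
        return 0;
}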
/fs/ntfs3/
fslog.c
2956 struct OPEN_ATTR_ENRTY *oe = NULL; in find_loaded_attr() local
2958 while ((oe = enum_rstbl(log->open_attr_tbl, oe))) { in find_loaded_attr()
2961 if (ino_get(&oe->ref) != rno) in find_loaded_attr()
2964 op_attr = (struct OpenAttr *)oe->ptr; in find_loaded_attr()
3020 static int do_action(struct ntfs_log *log, struct OPEN_ATTR_ENRTY *oe, in do_action() argument
3053 oa = oe->ptr; in do_action()
3161 t32 = le32_to_cpu(oe->bytes_per_index); in do_action()
3744 struct OPEN_ATTR_ENRTY *oe; in log_replay() local
4303 oe = NULL; in log_replay()
4304 while ((oe = enum_rstbl(oatbl, oe))) { in log_replay()
[all …]
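Both fslog.c hits walk the open-attribute restart table with the same cursor idiom: seed oe with NULL and keep calling enum_rstbl(tbl, oe) until it returns NULL. A small userspace sketch of that NULL-seeded iterator follows; the table layout and the helper name table_enum() are made up for illustration and are not the on-disk RESTART_TABLE format.

#include <stddef.h>
#include <stdio.h>

struct table_entry {
        int used;
        int value;
};

struct table {
        size_t count;
        struct table_entry entries[8];
};

/* return the used entry after "prev"; NULL prev starts at the top, NULL result ends the walk */
static struct table_entry *table_enum(struct table *tbl, struct table_entry *prev)
{
        size_t i = prev ? (size_t)(prev - tbl->entries) + 1 : 0;

        for (; i < tbl->count; i++)
                if (tbl->entries[i].used)
                        return &tbl->entries[i];
        return NULL;
}

int main(void)
{
        struct table tbl = {
                .count = 5,
                .entries = {
                        { .used = 1, .value = 10 },
                        { .used = 0 },                  /* freed slot, skipped */
                        { .used = 1, .value = 30 },
                        { .used = 1, .value = 40 },
                        { .used = 0 },
                },
        };
        struct table_entry *oe = NULL;

        while ((oe = table_enum(&tbl, oe)))             /* NULL-seeded cursor loop */
                printf("entry value %d\n", oe->value);
        return 0;
}

The same loop shape appears in both hits above (find_loaded_attr() and log_replay()), which is why oe is reset to NULL before each walk.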
/fs/ext2/
xattr.c
524 struct mb_cache_entry *oe; in ext2_xattr_set() local
526 oe = mb_cache_entry_delete_or_get(EA_BLOCK_CACHE(inode), in ext2_xattr_set()
528 if (!oe) { in ext2_xattr_set()
535 mb_cache_entry_put(EA_BLOCK_CACHE(inode), oe); in ext2_xattr_set()
662 struct mb_cache_entry *oe; in ext2_xattr_release_block() local
668 oe = mb_cache_entry_delete_or_get(ea_block_cache, hash, in ext2_xattr_release_block()
670 if (oe) { in ext2_xattr_release_block()
676 mb_cache_entry_wait_unused(oe); in ext2_xattr_release_block()
677 mb_cache_entry_put(ea_block_cache, oe); in ext2_xattr_release_block()
/fs/ext4/
xattr.c
473 struct mb_cache_entry *oe; in ext4_evict_ea_inode() local
478 while ((oe = mb_cache_entry_delete_or_get(EA_INODE_CACHE(inode), in ext4_evict_ea_inode()
480 mb_cache_entry_wait_unused(oe); in ext4_evict_ea_inode()
481 mb_cache_entry_put(EA_INODE_CACHE(inode), oe); in ext4_evict_ea_inode()
1294 struct mb_cache_entry *oe; in ext4_xattr_release_block() local
1296 oe = mb_cache_entry_delete_or_get(ea_block_cache, hash, in ext4_xattr_release_block()
1298 if (oe) { in ext4_xattr_release_block()
1300 mb_cache_entry_wait_unused(oe); in ext4_xattr_release_block()
1301 mb_cache_entry_put(ea_block_cache, oe); in ext4_xattr_release_block()
1945 struct mb_cache_entry *oe; in ext4_xattr_block_set() local
[all …]
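The ext2 and ext4 hits share one teardown sequence around the mb_cache API: mb_cache_entry_delete_or_get() removes the entry keyed by the block hash (returning it with a reference if someone still uses it), mb_cache_entry_wait_unused() waits for those users to finish, and mb_cache_entry_put() drops the reference. The sketch below models that sequence in userspace with pthreads; struct cache_entry and the entry_* helpers are hypothetical stand-ins, not the kernel mb_cache API.

#include <pthread.h>
#include <stdio.h>
#include <unistd.h>

struct cache_entry {
        pthread_mutex_t lock;
        pthread_cond_t unused;
        int refcount;                   /* one reference is owned by the cache itself */
        int hashed;                     /* still reachable through the cache? */
};

/* drop one reference; wake a waiter once only a single holder remains */
static void entry_put(struct cache_entry *e)
{
        pthread_mutex_lock(&e->lock);
        if (--e->refcount <= 1)
                pthread_cond_signal(&e->unused);
        pthread_mutex_unlock(&e->lock);
}

/* unhash the entry; hand it back if other users remain, else NULL */
static struct cache_entry *entry_delete_or_get(struct cache_entry *e)
{
        pthread_mutex_lock(&e->lock);
        e->hashed = 0;
        if (e->refcount == 1) {         /* the cache was the only holder */
                e->refcount = 0;
                pthread_mutex_unlock(&e->lock);
                return NULL;            /* safe to free right away */
        }
        /* the cache's reference is transferred to the caller */
        pthread_mutex_unlock(&e->lock);
        return e;
}

/* block until the caller's reference is the only one left */
static void entry_wait_unused(struct cache_entry *e)
{
        pthread_mutex_lock(&e->lock);
        while (e->refcount > 1)
                pthread_cond_wait(&e->unused, &e->lock);
        pthread_mutex_unlock(&e->lock);
}

static void *user_thread(void *arg)
{
        usleep(10000);                  /* pretend to use the shared block */
        entry_put(arg);                 /* then drop the user's reference */
        return NULL;
}

int main(void)
{
        struct cache_entry e = {
                .lock = PTHREAD_MUTEX_INITIALIZER,
                .unused = PTHREAD_COND_INITIALIZER,
                .refcount = 2,          /* the cache plus one active user */
                .hashed = 1,
        };
        pthread_t t;
        struct cache_entry *oe;

        pthread_create(&t, NULL, user_thread, &e);
        oe = entry_delete_or_get(&e);
        if (oe) {                       /* mirrors: if (oe) { wait_unused; put; } */
                entry_wait_unused(oe);
                entry_put(oe);
        }
        pthread_join(t, NULL);
        puts("entry is unused; the xattr block could now be freed");
        return 0;
}

Once the entry is unhashed, no new user can look it up, so waiting for the current holders to drop their references is enough to make freeing the block safe.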
/fs/btrfs/
file.c
3258 struct btrfs_ordered_extent *oe; in find_delalloc_subrange() local
3322 oe = btrfs_lookup_first_ordered_range(inode, start, len); in find_delalloc_subrange()
3323 if (!oe) in find_delalloc_subrange()
3327 oe_start = max(oe->file_offset, start); in find_delalloc_subrange()
3328 oe_end = min(oe->file_offset + oe->num_bytes - 1, end); in find_delalloc_subrange()
3330 btrfs_put_ordered_extent(oe); in find_delalloc_subrange()
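find_delalloc_subrange() clamps the ordered extent it found to the queried range with a max()/min() pair: oe_start = max(oe->file_offset, start) and oe_end = min(oe->file_offset + oe->num_bytes - 1, end). A standalone sketch of that intersection follows; struct ordered_range and clamp_to_query() are illustrative names, not btrfs API.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct ordered_range {
        uint64_t file_offset;
        uint64_t num_bytes;
};

static uint64_t max_u64(uint64_t a, uint64_t b) { return a > b ? a : b; }
static uint64_t min_u64(uint64_t a, uint64_t b) { return a < b ? a : b; }

/* returns true and fills [*oe_start, *oe_end] if the extent overlaps the query */
static bool clamp_to_query(const struct ordered_range *oe,
                           uint64_t start, uint64_t end,
                           uint64_t *oe_start, uint64_t *oe_end)
{
        uint64_t last = oe->file_offset + oe->num_bytes - 1;

        if (last < start || oe->file_offset > end)
                return false;                   /* no overlap with the query */
        *oe_start = max_u64(oe->file_offset, start);
        *oe_end = min_u64(last, end);
        return true;
}

int main(void)
{
        struct ordered_range oe = { .file_offset = 4096, .num_bytes = 8192 };
        uint64_t s, e;

        if (clamp_to_query(&oe, 0, 6143, &s, &e))
                printf("overlap: [%llu, %llu]\n",
                       (unsigned long long)s, (unsigned long long)e);
        return 0;
}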
inode.c
2972 struct btrfs_ordered_extent *oe) in insert_ordered_extent_file_extent() argument
2976 u64 num_bytes = oe->num_bytes; in insert_ordered_extent_file_extent()
2977 u64 ram_bytes = oe->ram_bytes; in insert_ordered_extent_file_extent()
2981 btrfs_set_stack_file_extent_disk_bytenr(&stack_fi, oe->disk_bytenr); in insert_ordered_extent_file_extent()
2983 oe->disk_num_bytes); in insert_ordered_extent_file_extent()
2984 btrfs_set_stack_file_extent_offset(&stack_fi, oe->offset); in insert_ordered_extent_file_extent()
2985 if (test_bit(BTRFS_ORDERED_TRUNCATED, &oe->flags)) { in insert_ordered_extent_file_extent()
2986 num_bytes = oe->truncated_len; in insert_ordered_extent_file_extent()
2991 btrfs_set_stack_file_extent_compression(&stack_fi, oe->compress_type); in insert_ordered_extent_file_extent()
3000 update_inode_bytes = test_bit(BTRFS_ORDERED_DIRECT, &oe->flags) || in insert_ordered_extent_file_extent()
[all …]
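insert_ordered_extent_file_extent() copies the ordered extent's byte counts into the stack file-extent item, but when BTRFS_ORDERED_TRUNCATED is set it records truncated_len instead of the full num_bytes. Below is a hedged, self-contained sketch of that length selection; the types and flag bit are illustrative stand-ins rather than btrfs definitions.

#include <stdint.h>
#include <stdio.h>

#define ORDERED_TRUNCATED       (1u << 0)       /* hypothetical flag bit */

struct ordered_extent {
        uint64_t disk_bytenr;
        uint64_t disk_num_bytes;
        uint64_t num_bytes;
        uint64_t truncated_len;
        uint32_t flags;
};

struct file_extent_item {
        uint64_t disk_bytenr;
        uint64_t disk_num_bytes;
        uint64_t num_bytes;
};

static void fill_file_extent(struct file_extent_item *fi,
                             const struct ordered_extent *oe)
{
        uint64_t num_bytes = oe->num_bytes;

        if (oe->flags & ORDERED_TRUNCATED)
                num_bytes = oe->truncated_len;  /* record only what survived truncation */

        fi->disk_bytenr = oe->disk_bytenr;
        fi->disk_num_bytes = oe->disk_num_bytes;
        fi->num_bytes = num_bytes;
}

int main(void)
{
        struct ordered_extent oe = {
                .disk_bytenr = 1 << 20, .disk_num_bytes = 65536,
                .num_bytes = 65536, .truncated_len = 4096,
                .flags = ORDERED_TRUNCATED,
        };
        struct file_extent_item fi;

        fill_file_extent(&fi, &oe);
        printf("recorded num_bytes=%llu\n", (unsigned long long)fi.num_bytes);
        return 0;
}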