Lines matching full:i (uses of the identifier i in the Linux kernel's lib/iov_iter.c; each entry shows the file line number, the matching source line, and its context annotation)
18 #define iterate_iovec(i, n, __v, __p, skip, STEP) { \ argument
21 __p = i->iov; \
46 #define iterate_kvec(i, n, __v, __p, skip, STEP) { \ argument
48 __p = i->kvec; \
69 #define iterate_bvec(i, n, __v, __bi, skip, STEP) { \ argument
74 for_each_bvec(__v, i->bvec, __bi, __start) { \
81 #define iterate_all_kinds(i, n, v, I, B, K) { \ argument
83 size_t skip = i->iov_offset; \
84 if (unlikely(i->type & ITER_BVEC)) { \
87 iterate_bvec(i, n, v, __bi, skip, (B)) \
88 } else if (unlikely(i->type & ITER_KVEC)) { \
91 iterate_kvec(i, n, v, kvec, skip, (K)) \
92 } else if (unlikely(i->type & ITER_DISCARD)) { \
96 iterate_iovec(i, n, v, iov, skip, (I)) \
101 #define iterate_and_advance(i, n, v, I, B, K) { \ argument
102 if (unlikely(i->count < n)) \
103 n = i->count; \
104 if (i->count) { \
105 size_t skip = i->iov_offset; \
106 if (unlikely(i->type & ITER_BVEC)) { \
107 const struct bio_vec *bvec = i->bvec; \
110 iterate_bvec(i, n, v, __bi, skip, (B)) \
111 i->bvec = __bvec_iter_bvec(i->bvec, __bi); \
112 i->nr_segs -= i->bvec - bvec; \
114 } else if (unlikely(i->type & ITER_KVEC)) { \
117 iterate_kvec(i, n, v, kvec, skip, (K)) \
122 i->nr_segs -= kvec - i->kvec; \
123 i->kvec = kvec; \
124 } else if (unlikely(i->type & ITER_DISCARD)) { \
129 iterate_iovec(i, n, v, iov, skip, (I)) \
134 i->nr_segs -= iov - i->iov; \
135 i->iov = iov; \
137 i->count -= n; \
138 i->iov_offset = skip; \
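The iterate_* macros above are the dispatch core of the file: iterate_all_kinds() walks the remaining segments read-only, while iterate_and_advance() also consumes them, updating count, iov_offset, nr_segs and the segment pointer as it goes. Below is a minimal sketch of how a copy helper plugs into iterate_and_advance(); it is modeled on the shape of _copy_to_iter(), but the helper name and the kmap_atomic()-based bvec copy are illustrative rather than code from this file.

/*
 * Sketch only: copy a kernel buffer into whatever memory the iterator
 * describes.  The three step expressions handle, in order, user iovecs,
 * bio_vecs and kernel kvecs; "v" is the per-segment view declared by
 * the macro.  The iovec step must evaluate to the number of bytes it
 * failed to copy, which copy_to_user() conveniently returns.
 */
static size_t copy_buf_to_iter_sketch(const void *addr, size_t bytes,
				      struct iov_iter *i)
{
	const char *from = addr;

	iterate_and_advance(i, bytes, v,
		/* ITER_IOVEC: copy out to user space */
		copy_to_user(v.iov_base, (from += v.iov_len) - v.iov_len,
			     v.iov_len),
		/* ITER_BVEC: map the page and copy into it */
		({
			char *to = kmap_atomic(v.bv_page);
			memcpy(to + v.bv_offset,
			       (from += v.bv_len) - v.bv_len, v.bv_len);
			kunmap_atomic(to);
		}),
		/* ITER_KVEC: plain memcpy into kernel memory */
		memcpy(v.iov_base, (from += v.iov_len) - v.iov_len, v.iov_len)
	)
	return bytes;
}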
165 struct iov_iter *i) in copy_page_to_iter_iovec() argument
172 if (unlikely(bytes > i->count)) in copy_page_to_iter_iovec()
173 bytes = i->count; in copy_page_to_iter_iovec()
180 iov = i->iov; in copy_page_to_iter_iovec()
181 skip = i->iov_offset; in copy_page_to_iter_iovec()
241 i->count -= wanted - bytes; in copy_page_to_iter_iovec()
242 i->nr_segs -= iov - i->iov; in copy_page_to_iter_iovec()
243 i->iov = iov; in copy_page_to_iter_iovec()
244 i->iov_offset = skip; in copy_page_to_iter_iovec()
249 struct iov_iter *i) in copy_page_from_iter_iovec() argument
256 if (unlikely(bytes > i->count)) in copy_page_from_iter_iovec()
257 bytes = i->count; in copy_page_from_iter_iovec()
264 iov = i->iov; in copy_page_from_iter_iovec()
265 skip = i->iov_offset; in copy_page_from_iter_iovec()
325 i->count -= wanted - bytes; in copy_page_from_iter_iovec()
326 i->nr_segs -= iov - i->iov; in copy_page_from_iter_iovec()
327 i->iov = iov; in copy_page_from_iter_iovec()
328 i->iov_offset = skip; in copy_page_from_iter_iovec()
333 static bool sanity(const struct iov_iter *i) in sanity() argument
335 struct pipe_inode_info *pipe = i->pipe; in sanity()
340 unsigned int i_head = i->head; in sanity()
343 if (i->iov_offset) { in sanity()
351 if (unlikely(p->offset + p->len != i->iov_offset)) in sanity()
359 printk(KERN_ERR "idx = %d, offset = %zd\n", i_head, i->iov_offset); in sanity()
372 #define sanity(i) true argument
376 struct iov_iter *i) in copy_page_to_iter_pipe() argument
378 struct pipe_inode_info *pipe = i->pipe; in copy_page_to_iter_pipe()
382 unsigned int i_head = i->head; in copy_page_to_iter_pipe()
385 if (unlikely(bytes > i->count)) in copy_page_to_iter_pipe()
386 bytes = i->count; in copy_page_to_iter_pipe()
391 if (!sanity(i)) in copy_page_to_iter_pipe()
394 off = i->iov_offset; in copy_page_to_iter_pipe()
400 i->iov_offset += bytes; in copy_page_to_iter_pipe()
417 i->iov_offset = offset + bytes; in copy_page_to_iter_pipe()
418 i->head = i_head; in copy_page_to_iter_pipe()
420 i->count -= bytes; in copy_page_to_iter_pipe()
428 * Return 0 on success, or non-zero if the memory could not be accessed (i.e.
431 int iov_iter_fault_in_readable(struct iov_iter *i, size_t bytes) in iov_iter_fault_in_readable() argument
433 size_t skip = i->iov_offset; in iov_iter_fault_in_readable()
438 if (iter_is_iovec(i)) { in iov_iter_fault_in_readable()
439 iterate_iovec(i, bytes, v, iov, skip, ({ in iov_iter_fault_in_readable()
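The comment at line 428 states the contract: iov_iter_fault_in_readable() returns 0 when the first 'bytes' of the iterator's user memory can be touched, and non-zero otherwise. A sketch of the usual caller pattern follows, pre-faulting before a copy done under a page lock, as buffered-write paths do; the helper name and the trimmed steps are assumptions.

/*
 * Sketch: pre-fault the source user pages so that a later copy done
 * under a page lock (typically iov_iter_copy_from_user_atomic()) is
 * unlikely to fault.  Only the return-value handling is the point here.
 */
static ssize_t prefault_and_copy_sketch(struct iov_iter *i)
{
	size_t bytes = min_t(size_t, PAGE_SIZE, iov_iter_count(i));

	if (unlikely(iov_iter_fault_in_readable(i, bytes)))
		return -EFAULT;		/* user memory is not accessible */

	/* ... lock the destination page and copy 'bytes' from 'i' ... */
	return bytes;
}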
449 void iov_iter_init(struct iov_iter *i, unsigned int direction, in iov_iter_init() argument
458 i->type = ITER_KVEC | direction; in iov_iter_init()
459 i->kvec = (struct kvec *)iov; in iov_iter_init()
461 i->type = ITER_IOVEC | direction; in iov_iter_init()
462 i->iov = iov; in iov_iter_init()
464 i->nr_segs = nr_segs; in iov_iter_init()
465 i->iov_offset = 0; in iov_iter_init()
466 i->count = count; in iov_iter_init()
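iov_iter_init() is the constructor for user-backed iterators: it wraps an iovec array as ITER_IOVEC, or as ITER_KVEC when the caller is operating on kernel addresses (the branch at lines 458-459). A short usage sketch over a single user buffer; the helper name and arguments are assumptions, not code from this file.

/*
 * Sketch: wrap one user buffer in a READ iterator and fill it from a
 * kernel buffer.  READ means data flows *into* the memory the iterator
 * describes, so copy_to_iter() below writes to ubuf.
 */
static size_t fill_user_buf_sketch(void __user *ubuf, size_t len,
				   const void *kbuf)
{
	struct iovec iov = { .iov_base = ubuf, .iov_len = len };
	struct iov_iter iter;

	iov_iter_init(&iter, READ, &iov, 1, len);
	return copy_to_iter(kbuf, len, &iter);
}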
496 static inline void data_start(const struct iov_iter *i, in data_start() argument
499 unsigned int p_mask = i->pipe->ring_size - 1; in data_start()
500 unsigned int iter_head = i->head; in data_start()
501 size_t off = i->iov_offset; in data_start()
503 if (off && (!allocated(&i->pipe->bufs[iter_head & p_mask]) || in data_start()
512 static size_t push_pipe(struct iov_iter *i, size_t size, in push_pipe() argument
515 struct pipe_inode_info *pipe = i->pipe; in push_pipe()
522 if (unlikely(size > i->count)) in push_pipe()
523 size = i->count; in push_pipe()
528 data_start(i, &iter_head, &off); in push_pipe()
562 struct iov_iter *i) in copy_pipe_to_iter() argument
564 struct pipe_inode_info *pipe = i->pipe; in copy_pipe_to_iter()
569 if (!sanity(i)) in copy_pipe_to_iter()
572 bytes = n = push_pipe(i, bytes, &i_head, &off); in copy_pipe_to_iter()
578 i->head = i_head; in copy_pipe_to_iter()
579 i->iov_offset = off + chunk; in copy_pipe_to_iter()
585 i->count -= bytes; in copy_pipe_to_iter()
598 struct iov_iter *i) in csum_and_copy_to_pipe_iter() argument
600 struct pipe_inode_info *pipe = i->pipe; in csum_and_copy_to_pipe_iter()
607 if (!sanity(i)) in csum_and_copy_to_pipe_iter()
610 bytes = n = push_pipe(i, bytes, &i_head, &r); in csum_and_copy_to_pipe_iter()
618 i->head = i_head; in csum_and_copy_to_pipe_iter()
619 i->iov_offset = r + chunk; in csum_and_copy_to_pipe_iter()
626 i->count -= bytes; in csum_and_copy_to_pipe_iter()
632 size_t _copy_to_iter(const void *addr, size_t bytes, struct iov_iter *i) in _copy_to_iter() argument
635 if (unlikely(iov_iter_is_pipe(i))) in _copy_to_iter()
636 return copy_pipe_to_iter(addr, bytes, i); in _copy_to_iter()
637 if (iter_is_iovec(i)) in _copy_to_iter()
639 iterate_and_advance(i, bytes, v, in _copy_to_iter()
674 struct iov_iter *i) in copy_mc_pipe_to_iter() argument
676 struct pipe_inode_info *pipe = i->pipe; in copy_mc_pipe_to_iter()
681 if (!sanity(i)) in copy_mc_pipe_to_iter()
684 bytes = n = push_pipe(i, bytes, &i_head, &off); in copy_mc_pipe_to_iter()
693 i->head = i_head; in copy_mc_pipe_to_iter()
694 i->iov_offset = off + chunk - rem; in copy_mc_pipe_to_iter()
703 i->count -= xfer; in copy_mc_pipe_to_iter()
730 size_t _copy_mc_to_iter(const void *addr, size_t bytes, struct iov_iter *i) in _copy_mc_to_iter() argument
735 if (unlikely(iov_iter_is_pipe(i))) in _copy_mc_to_iter()
736 return copy_mc_pipe_to_iter(addr, bytes, i); in _copy_mc_to_iter()
737 if (iter_is_iovec(i)) in _copy_mc_to_iter()
739 iterate_and_advance(i, bytes, v, in _copy_mc_to_iter()
767 size_t _copy_from_iter(void *addr, size_t bytes, struct iov_iter *i) in _copy_from_iter() argument
770 if (unlikely(iov_iter_is_pipe(i))) { in _copy_from_iter()
774 if (iter_is_iovec(i)) in _copy_from_iter()
776 iterate_and_advance(i, bytes, v, in _copy_from_iter()
787 bool _copy_from_iter_full(void *addr, size_t bytes, struct iov_iter *i) in _copy_from_iter_full() argument
790 if (unlikely(iov_iter_is_pipe(i))) { in _copy_from_iter_full()
794 if (unlikely(i->count < bytes)) in _copy_from_iter_full()
797 if (iter_is_iovec(i)) in _copy_from_iter_full()
799 iterate_all_kinds(i, bytes, v, ({ in _copy_from_iter_full()
809 iov_iter_advance(i, bytes); in _copy_from_iter_full()
814 size_t _copy_from_iter_nocache(void *addr, size_t bytes, struct iov_iter *i) in _copy_from_iter_nocache() argument
817 if (unlikely(iov_iter_is_pipe(i))) { in _copy_from_iter_nocache()
821 iterate_and_advance(i, bytes, v, in _copy_from_iter_nocache()
848 size_t _copy_from_iter_flushcache(void *addr, size_t bytes, struct iov_iter *i) in _copy_from_iter_flushcache() argument
851 if (unlikely(iov_iter_is_pipe(i))) { in _copy_from_iter_flushcache()
855 iterate_and_advance(i, bytes, v, in _copy_from_iter_flushcache()
869 bool _copy_from_iter_full_nocache(void *addr, size_t bytes, struct iov_iter *i) in _copy_from_iter_full_nocache() argument
872 if (unlikely(iov_iter_is_pipe(i))) { in _copy_from_iter_full_nocache()
876 if (unlikely(i->count < bytes)) in _copy_from_iter_full_nocache()
878 iterate_all_kinds(i, bytes, v, ({ in _copy_from_iter_full_nocache()
888 iov_iter_advance(i, bytes); in _copy_from_iter_full_nocache()
918 struct iov_iter *i) in copy_page_to_iter() argument
922 if (i->type & (ITER_BVEC|ITER_KVEC)) { in copy_page_to_iter()
924 size_t wanted = copy_to_iter(kaddr + offset, bytes, i); in copy_page_to_iter()
927 } else if (unlikely(iov_iter_is_discard(i))) { in copy_page_to_iter()
928 if (unlikely(i->count < bytes)) in copy_page_to_iter()
929 bytes = i->count; in copy_page_to_iter()
930 i->count -= bytes; in copy_page_to_iter()
932 } else if (likely(!iov_iter_is_pipe(i))) in copy_page_to_iter()
933 return copy_page_to_iter_iovec(page, offset, bytes, i); in copy_page_to_iter()
935 return copy_page_to_iter_pipe(page, offset, bytes, i); in copy_page_to_iter()
940 struct iov_iter *i) in copy_page_from_iter() argument
944 if (unlikely(iov_iter_is_pipe(i) || iov_iter_is_discard(i))) { in copy_page_from_iter()
948 if (i->type & (ITER_BVEC|ITER_KVEC)) { in copy_page_from_iter()
950 size_t wanted = _copy_from_iter(kaddr + offset, bytes, i); in copy_page_from_iter()
954 return copy_page_from_iter_iovec(page, offset, bytes, i); in copy_page_from_iter()
958 static size_t pipe_zero(size_t bytes, struct iov_iter *i) in pipe_zero() argument
960 struct pipe_inode_info *pipe = i->pipe; in pipe_zero()
965 if (!sanity(i)) in pipe_zero()
968 bytes = n = push_pipe(i, bytes, &i_head, &off); in pipe_zero()
975 i->head = i_head; in pipe_zero()
976 i->iov_offset = off + chunk; in pipe_zero()
981 i->count -= bytes; in pipe_zero()
985 size_t iov_iter_zero(size_t bytes, struct iov_iter *i) in iov_iter_zero() argument
987 if (unlikely(iov_iter_is_pipe(i))) in iov_iter_zero()
988 return pipe_zero(bytes, i); in iov_iter_zero()
989 iterate_and_advance(i, bytes, v, in iov_iter_zero()
1000 struct iov_iter *i, unsigned long offset, size_t bytes) in iov_iter_copy_from_user_atomic() argument
1007 if (unlikely(iov_iter_is_pipe(i) || iov_iter_is_discard(i))) { in iov_iter_copy_from_user_atomic()
1012 iterate_all_kinds(i, bytes, v, in iov_iter_copy_from_user_atomic()
1023 static inline void pipe_truncate(struct iov_iter *i) in pipe_truncate() argument
1025 struct pipe_inode_info *pipe = i->pipe; in pipe_truncate()
1032 unsigned int i_head = i->head; in pipe_truncate()
1033 size_t off = i->iov_offset; in pipe_truncate()
1049 static void pipe_advance(struct iov_iter *i, size_t size) in pipe_advance() argument
1051 struct pipe_inode_info *pipe = i->pipe; in pipe_advance()
1052 if (unlikely(i->count < size)) in pipe_advance()
1053 size = i->count; in pipe_advance()
1057 unsigned int i_head = i->head; in pipe_advance()
1058 size_t off = i->iov_offset, left = size; in pipe_advance()
1069 i->head = i_head; in pipe_advance()
1070 i->iov_offset = buf->offset + left; in pipe_advance()
1072 i->count -= size; in pipe_advance()
1074 pipe_truncate(i); in pipe_advance()
1077 void iov_iter_advance(struct iov_iter *i, size_t size) in iov_iter_advance() argument
1079 if (unlikely(iov_iter_is_pipe(i))) { in iov_iter_advance()
1080 pipe_advance(i, size); in iov_iter_advance()
1083 if (unlikely(iov_iter_is_discard(i))) { in iov_iter_advance()
1084 i->count -= size; in iov_iter_advance()
1087 iterate_and_advance(i, size, v, 0, 0, 0) in iov_iter_advance()
1091 void iov_iter_revert(struct iov_iter *i, size_t unroll) in iov_iter_revert() argument
1097 i->count += unroll; in iov_iter_revert()
1098 if (unlikely(iov_iter_is_pipe(i))) { in iov_iter_revert()
1099 struct pipe_inode_info *pipe = i->pipe; in iov_iter_revert()
1101 unsigned int i_head = i->head; in iov_iter_revert()
1102 size_t off = i->iov_offset; in iov_iter_revert()
1111 if (!unroll && i_head == i->start_head) { in iov_iter_revert()
1119 i->iov_offset = off; in iov_iter_revert()
1120 i->head = i_head; in iov_iter_revert()
1121 pipe_truncate(i); in iov_iter_revert()
1124 if (unlikely(iov_iter_is_discard(i))) in iov_iter_revert()
1126 if (unroll <= i->iov_offset) { in iov_iter_revert()
1127 i->iov_offset -= unroll; in iov_iter_revert()
1130 unroll -= i->iov_offset; in iov_iter_revert()
1131 if (iov_iter_is_bvec(i)) { in iov_iter_revert()
1132 const struct bio_vec *bvec = i->bvec; in iov_iter_revert()
1135 i->nr_segs++; in iov_iter_revert()
1137 i->bvec = bvec; in iov_iter_revert()
1138 i->iov_offset = n - unroll; in iov_iter_revert()
1144 const struct iovec *iov = i->iov; in iov_iter_revert()
1147 i->nr_segs++; in iov_iter_revert()
1149 i->iov = iov; in iov_iter_revert()
1150 i->iov_offset = n - unroll; in iov_iter_revert()
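iov_iter_revert() undoes a prior advance: it first gives back any partially consumed segment through iov_offset, then walks segment lengths backwards, restoring nr_segs and the iov or bvec pointer. A sketch of the usual advance-then-revert pairing; the helper name is illustrative.

/*
 * Sketch: roll back a partial transfer.  copy_to_iter() advances the
 * iterator by however much it managed to copy, so on a short copy the
 * caller can revert by exactly that amount and retry or fail cleanly.
 */
static size_t copy_all_or_nothing_sketch(const void *kbuf, size_t len,
					 struct iov_iter *i)
{
	size_t copied = copy_to_iter(kbuf, len, i);

	if (copied != len) {
		iov_iter_revert(i, copied);	/* undo the partial advance */
		return 0;
	}
	return copied;
}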
1162 size_t iov_iter_single_seg_count(const struct iov_iter *i) in iov_iter_single_seg_count() argument
1164 if (unlikely(iov_iter_is_pipe(i))) in iov_iter_single_seg_count()
1165 return i->count; // it is a silly place, anyway in iov_iter_single_seg_count()
1166 if (i->nr_segs == 1) in iov_iter_single_seg_count()
1167 return i->count; in iov_iter_single_seg_count()
1168 if (unlikely(iov_iter_is_discard(i))) in iov_iter_single_seg_count()
1169 return i->count; in iov_iter_single_seg_count()
1170 else if (iov_iter_is_bvec(i)) in iov_iter_single_seg_count()
1171 return min(i->count, i->bvec->bv_len - i->iov_offset); in iov_iter_single_seg_count()
1173 return min(i->count, i->iov->iov_len - i->iov_offset); in iov_iter_single_seg_count()
1177 void iov_iter_kvec(struct iov_iter *i, unsigned int direction, in iov_iter_kvec() argument
1182 i->type = ITER_KVEC | (direction & (READ | WRITE)); in iov_iter_kvec()
1183 i->kvec = kvec; in iov_iter_kvec()
1184 i->nr_segs = nr_segs; in iov_iter_kvec()
1185 i->iov_offset = 0; in iov_iter_kvec()
1186 i->count = count; in iov_iter_kvec()
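iov_iter_kvec() and iov_iter_bvec() (next) build iterators over kernel memory so that code written against iov_iter, a ->read_iter() implementation for instance, can be handed kernel buffers or bio pages without special-casing. A kvec sketch follows; the helper name is an assumption.

/*
 * Sketch: present a plain kernel buffer as an ITER_KVEC destination and
 * copy into it.  For ITER_KVEC, copy_to_iter() boils down to memcpy().
 */
static size_t copy_into_kbuf_sketch(const void *src, void *kbuf, size_t len)
{
	struct kvec kv = { .iov_base = kbuf, .iov_len = len };
	struct iov_iter iter;

	iov_iter_kvec(&iter, READ, &kv, 1, len);
	return copy_to_iter(src, len, &iter);
}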
1190 void iov_iter_bvec(struct iov_iter *i, unsigned int direction, in iov_iter_bvec() argument
1195 i->type = ITER_BVEC | (direction & (READ | WRITE)); in iov_iter_bvec()
1196 i->bvec = bvec; in iov_iter_bvec()
1197 i->nr_segs = nr_segs; in iov_iter_bvec()
1198 i->iov_offset = 0; in iov_iter_bvec()
1199 i->count = count; in iov_iter_bvec()
1203 void iov_iter_pipe(struct iov_iter *i, unsigned int direction, in iov_iter_pipe() argument
1209 i->type = ITER_PIPE | READ; in iov_iter_pipe()
1210 i->pipe = pipe; in iov_iter_pipe()
1211 i->head = pipe->head; in iov_iter_pipe()
1212 i->iov_offset = 0; in iov_iter_pipe()
1213 i->count = count; in iov_iter_pipe()
1214 i->start_head = i->head; in iov_iter_pipe()
1219 * iov_iter_discard - Initialise an I/O iterator that discards data
1220 * @i: The iterator to initialise.
1222 * @count: The size of the I/O buffer in bytes.
1224 * Set up an I/O iterator that just discards everything that's written to it.
1227 void iov_iter_discard(struct iov_iter *i, unsigned int direction, size_t count) in iov_iter_discard() argument
1230 i->type = ITER_DISCARD | READ; in iov_iter_discard()
1231 i->count = count; in iov_iter_discard()
1232 i->iov_offset = 0; in iov_iter_discard()
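The kernel-doc above gives the semantics: an ITER_DISCARD iterator accepts data and advances, but stores nothing. A usage sketch for draining or skipping payload bytes; the helper name is illustrative.

/*
 * Sketch: "copy" count bytes into a discard iterator.  The call reports
 * success and the iterator's count shrinks, but no data is stored
 * anywhere, which is handy for draining or skipping input.
 */
static size_t discard_bytes_sketch(const void *kbuf, size_t count)
{
	struct iov_iter iter;

	iov_iter_discard(&iter, READ, count);
	return copy_to_iter(kbuf, count, &iter);
}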
1236 unsigned long iov_iter_alignment(const struct iov_iter *i) in iov_iter_alignment() argument
1239 size_t size = i->count; in iov_iter_alignment()
1241 if (unlikely(iov_iter_is_pipe(i))) { in iov_iter_alignment()
1242 unsigned int p_mask = i->pipe->ring_size - 1; in iov_iter_alignment()
1244 if (size && i->iov_offset && allocated(&i->pipe->bufs[i->head & p_mask])) in iov_iter_alignment()
1245 return size | i->iov_offset; in iov_iter_alignment()
1248 iterate_all_kinds(i, size, v, in iov_iter_alignment()
1257 unsigned long iov_iter_gap_alignment(const struct iov_iter *i) in iov_iter_gap_alignment() argument
1260 size_t size = i->count; in iov_iter_gap_alignment()
1262 if (unlikely(iov_iter_is_pipe(i) || iov_iter_is_discard(i))) { in iov_iter_gap_alignment()
1267 iterate_all_kinds(i, size, v, in iov_iter_gap_alignment()
1279 static inline ssize_t __pipe_get_pages(struct iov_iter *i, in __pipe_get_pages() argument
1285 struct pipe_inode_info *pipe = i->pipe; in __pipe_get_pages()
1287 ssize_t n = push_pipe(i, maxsize, &iter_head, start); in __pipe_get_pages()
1302 static ssize_t pipe_get_pages(struct iov_iter *i, in pipe_get_pages() argument
1312 if (!sanity(i)) in pipe_get_pages()
1315 data_start(i, &iter_head, start); in pipe_get_pages()
1317 npages = pipe_space_for_user(iter_head, i->pipe->tail, i->pipe); in pipe_get_pages()
1320 return __pipe_get_pages(i, min(maxsize, capacity), pages, iter_head, start); in pipe_get_pages()
1323 ssize_t iov_iter_get_pages(struct iov_iter *i, in iov_iter_get_pages() argument
1327 if (maxsize > i->count) in iov_iter_get_pages()
1328 maxsize = i->count; in iov_iter_get_pages()
1330 if (unlikely(iov_iter_is_pipe(i))) in iov_iter_get_pages()
1331 return pipe_get_pages(i, pages, maxsize, maxpages, start); in iov_iter_get_pages()
1332 if (unlikely(iov_iter_is_discard(i))) in iov_iter_get_pages()
1335 iterate_all_kinds(i, maxsize, v, ({ in iov_iter_get_pages()
1346 iov_iter_rw(i) != WRITE ? FOLL_WRITE : 0, in iov_iter_get_pages()
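iov_iter_get_pages() pins up to maxpages pages backing the data the iterator currently points at and reports, via *start, the offset of that data within the first page; in this version it does not advance the iterator, so callers advance it themselves once the pages are in flight. A direct-I/O style sketch; the helper name and the LONG_MAX cap are assumptions.

/*
 * Sketch: pin user pages for direct I/O.  The return value is the
 * number of bytes covered by the pinned pages (or a negative error),
 * and *start is the offset of the data within pages[0].
 */
static ssize_t pin_iter_pages_sketch(struct iov_iter *i, struct page **pages,
				     unsigned int maxpages)
{
	size_t start;
	ssize_t n = iov_iter_get_pages(i, pages, LONG_MAX, maxpages, &start);

	if (n > 0)
		iov_iter_advance(i, n);	/* consume what was just pinned */
	return n;
}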
1369 static ssize_t pipe_get_pages_alloc(struct iov_iter *i, in pipe_get_pages_alloc() argument
1380 if (!sanity(i)) in pipe_get_pages_alloc()
1383 data_start(i, &iter_head, start); in pipe_get_pages_alloc()
1385 npages = pipe_space_for_user(iter_head, i->pipe->tail, i->pipe); in pipe_get_pages_alloc()
1394 n = __pipe_get_pages(i, maxsize, p, iter_head, start); in pipe_get_pages_alloc()
1402 ssize_t iov_iter_get_pages_alloc(struct iov_iter *i, in iov_iter_get_pages_alloc() argument
1408 if (maxsize > i->count) in iov_iter_get_pages_alloc()
1409 maxsize = i->count; in iov_iter_get_pages_alloc()
1411 if (unlikely(iov_iter_is_pipe(i))) in iov_iter_get_pages_alloc()
1412 return pipe_get_pages_alloc(i, pages, maxsize, start); in iov_iter_get_pages_alloc()
1413 if (unlikely(iov_iter_is_discard(i))) in iov_iter_get_pages_alloc()
1416 iterate_all_kinds(i, maxsize, v, ({ in iov_iter_get_pages_alloc()
1428 iov_iter_rw(i) != WRITE ? FOLL_WRITE : 0, p); in iov_iter_get_pages_alloc()
1453 struct iov_iter *i) in csum_and_copy_from_iter() argument
1459 if (unlikely(iov_iter_is_pipe(i) || iov_iter_is_discard(i))) { in csum_and_copy_from_iter()
1463 iterate_and_advance(i, bytes, v, ({ in csum_and_copy_from_iter()
1492 struct iov_iter *i) in csum_and_copy_from_iter_full() argument
1498 if (unlikely(iov_iter_is_pipe(i) || iov_iter_is_discard(i))) { in csum_and_copy_from_iter_full()
1502 if (unlikely(i->count < bytes)) in csum_and_copy_from_iter_full()
1504 iterate_all_kinds(i, bytes, v, ({ in csum_and_copy_from_iter_full()
1528 iov_iter_advance(i, bytes); in csum_and_copy_from_iter_full()
1534 struct iov_iter *i) in csum_and_copy_to_iter() argument
1541 if (unlikely(iov_iter_is_pipe(i))) in csum_and_copy_to_iter()
1542 return csum_and_copy_to_pipe_iter(addr, bytes, _csstate, i); in csum_and_copy_to_iter()
1546 if (unlikely(iov_iter_is_discard(i))) { in csum_and_copy_to_iter()
1550 iterate_and_advance(i, bytes, v, ({ in csum_and_copy_to_iter()
1580 struct iov_iter *i) in hash_and_copy_to_iter() argument
1587 copied = copy_to_iter(addr, bytes, i); in hash_and_copy_to_iter()
1598 int iov_iter_npages(const struct iov_iter *i, int maxpages) in iov_iter_npages() argument
1600 size_t size = i->count; in iov_iter_npages()
1605 if (unlikely(iov_iter_is_discard(i))) in iov_iter_npages()
1608 if (unlikely(iov_iter_is_pipe(i))) { in iov_iter_npages()
1609 struct pipe_inode_info *pipe = i->pipe; in iov_iter_npages()
1613 if (!sanity(i)) in iov_iter_npages()
1616 data_start(i, &iter_head, &off); in iov_iter_npages()
1621 } else iterate_all_kinds(i, size, v, ({ in iov_iter_npages()
1669 int ret = -EFAULT, i; in copy_compat_iovec_from_user() local
1674 for (i = 0; i < nr_segs; i++) { in copy_compat_iovec_from_user()
1678 unsafe_get_user(len, &uiov[i].iov_len, uaccess_end); in copy_compat_iovec_from_user()
1679 unsafe_get_user(buf, &uiov[i].iov_base, uaccess_end); in copy_compat_iovec_from_user()
1686 iov[i].iov_base = compat_ptr(buf); in copy_compat_iovec_from_user()
1687 iov[i].iov_len = len; in copy_compat_iovec_from_user()
1748 struct iov_iter *i, bool compat) in __import_iovec() argument
1785 iov_iter_init(i, type, iov, nr_segs, total_len); in __import_iovec()
1804 * @i: Pointer to iterator that will be initialized on success.
1817 struct iovec **iovp, struct iov_iter *i) in import_iovec() argument
1819 return __import_iovec(type, uvec, nr_segs, fast_segs, iovp, i, in import_iovec()
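import_iovec() is the front door for vectored syscalls: it copies in the user iovec array (through copy_compat_iovec_from_user() for compat tasks), validates it, and initialises the iterator, returning the total byte count. On success the caller frees *iovp; when the fast on-stack array sufficed, *iovp is set to NULL so the kfree() is harmless. A sketch of the usual call pattern; the function name and the trimmed body are illustrative.

/*
 * Sketch of a vectored-read path.  import_iovec() fills 'iter' and
 * returns the total length; 'iov' must be freed afterwards (it is NULL
 * if the on-stack iovstack[] was sufficient, so kfree() is safe).
 */
static ssize_t vectored_read_sketch(const struct iovec __user *uvec,
				    unsigned long nr_segs)
{
	struct iovec iovstack[UIO_FASTIOV], *iov = iovstack;
	struct iov_iter iter;
	ssize_t ret;

	ret = import_iovec(READ, uvec, nr_segs, ARRAY_SIZE(iovstack),
			   &iov, &iter);
	if (ret < 0)
		return ret;

	/* ... pass &iter to ->read_iter() or copy_to_iter() here ... */

	kfree(iov);
	return ret;
}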
1825 struct iovec *iov, struct iov_iter *i) in import_single_range() argument
1834 iov_iter_init(i, rw, iov, 1, len); in import_single_range()
1839 int iov_iter_for_each_range(struct iov_iter *i, size_t bytes, in iov_iter_for_each_range() argument
1848 iterate_all_kinds(i, bytes, v, -EINVAL, ({ in iov_iter_for_each_range()