Lines matching references to the identifier `i` in lib/iov_iter.c (Linux kernel iov_iter implementation). In almost every hit below, `i` is the `struct iov_iter *` being operated on; each entry is prefixed with its line number in the file.

 20	#define iterate_buf(i, n, base, len, off, __p, STEP) {	\
 23		base = __p + i->iov_offset;	\
 25		i->iov_offset += len;	\

 30	#define iterate_iovec(i, n, base, len, off, __p, STEP) {	\
 32		size_t skip = i->iov_offset;	\
 47		i->iov_offset = skip;	\

 51	#define iterate_bvec(i, n, base, len, off, p, STEP) {	\
 53		unsigned skip = i->iov_offset;	\
 75		i->iov_offset = skip;	\

 79	#define iterate_xarray(i, n, base, len, __off, STEP) {	\
 83		loff_t start = i->xarray_start + i->iov_offset;	\
 85		XA_STATE(xas, i->xarray, index);	\
115		i->iov_offset += __off;	\

119	#define __iterate_and_advance(i, n, base, len, off, I, K) {	\
120		if (unlikely(i->count < n))	\
121			n = i->count;	\
123		if (likely(iter_is_ubuf(i))) {	\
126			iterate_buf(i, n, base, len, off,	\
127				    i->ubuf, (I))	\
128		} else if (likely(iter_is_iovec(i))) {	\
129			const struct iovec *iov = i->iov;	\
132			iterate_iovec(i, n, base, len, off,	\
134			i->nr_segs -= iov - i->iov;	\
135			i->iov = iov;	\
136		} else if (iov_iter_is_bvec(i)) {	\
137			const struct bio_vec *bvec = i->bvec;	\
140			iterate_bvec(i, n, base, len, off,	\
142			i->nr_segs -= bvec - i->bvec;	\
143			i->bvec = bvec;	\
144		} else if (iov_iter_is_kvec(i)) {	\
145			const struct kvec *kvec = i->kvec;	\
148			iterate_iovec(i, n, base, len, off,	\
150			i->nr_segs -= kvec - i->kvec;	\
151			i->kvec = kvec;	\
152		} else if (iov_iter_is_xarray(i)) {	\
155			iterate_xarray(i, n, base, len, off,	\
158		i->count -= n;	\

161	#define iterate_and_advance(i, n, base, len, off, I, K)	\
162		__iterate_and_advance(i, n, base, len, off, I, ((void)(K),0))
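The iterate_* macros above all share one shape: clamp n to i->count, expand a caller-supplied step expression (I for user addresses, K for kernel addresses) once per contiguous segment, then fold the consumed length back into iov_offset, count, and nr_segs. A minimal userspace model of that control flow may help; iterate_model() and its parameters are illustrative names, not kernel code, and unlike the kernel macros this model does not hand the adjusted segment pointer back to the caller:

/* Simplified model of iterate_and_advance(): apply a per-segment
 * "step" callback over an iovec array, honouring a starting offset
 * into the first segment, and report how many bytes were consumed. */
#include <stddef.h>
#include <sys/uio.h>

static size_t iterate_model(struct iovec *iov, size_t nr_segs,
			    size_t *iov_offset, size_t n,
			    void (*step)(void *base, size_t len))
{
	size_t done = 0, skip = *iov_offset;

	while (nr_segs && n) {
		size_t len = iov->iov_len - skip;

		if (len > n)
			len = n;
		if (len)
			step((char *)iov->iov_base + skip, len);
		done += len;
		n -= len;
		skip += len;
		if (skip == iov->iov_len) {	/* segment exhausted */
			iov++;
			nr_segs--;
			skip = 0;
		}
	}
	*iov_offset = skip;	/* offset into the segment we stopped in */
	return done;
}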
196	static bool sanity(const struct iov_iter *i)
198		struct pipe_inode_info *pipe = i->pipe;
202		unsigned int i_head = i->head;
205		if (i->last_offset) {
213		if (unlikely(p->offset + p->len != abs(i->last_offset)))
221		printk(KERN_ERR "idx = %d, offset = %d\n", i_head, i->last_offset);

234	#define sanity(i) true	/* stub when the paranoia checks are compiled out */

273	static struct page *append_pipe(struct iov_iter *i, size_t size,
276		struct pipe_inode_info *pipe = i->pipe;
277		int offset = i->last_offset;
286		i->last_offset += size;
287		i->count -= size;
299		i->head = pipe->head - 1;
300		i->last_offset = size;
301		i->count -= size;

306			struct iov_iter *i)	/* in copy_page_to_iter_pipe() */
308		struct pipe_inode_info *pipe = i->pipe;
311		if (unlikely(bytes > i->count))
312			bytes = i->count;
317		if (!sanity(i))
320		if (offset && i->last_offset == -offset) {	// could we merge it?
324			i->last_offset -= bytes;
325			i->count -= bytes;
333		i->last_offset = -(offset + bytes);
334		i->head = head;
335		i->count -= bytes;
352	size_t fault_in_iov_iter_readable(const struct iov_iter *i, size_t size)
354		if (iter_is_ubuf(i)) {
355			size_t n = min(size, iov_iter_count(i));
356			n -= fault_in_readable(i->ubuf + i->iov_offset, n);
358		} else if (iter_is_iovec(i)) {
359			size_t count = min(size, iov_iter_count(i));
364			for (p = i->iov, skip = i->iov_offset; count; p++, skip = 0) {

395	size_t fault_in_iov_iter_writeable(const struct iov_iter *i, size_t size)
397		if (iter_is_ubuf(i)) {
398			size_t n = min(size, iov_iter_count(i));
399			n -= fault_in_safe_writeable(i->ubuf + i->iov_offset, n);
401		} else if (iter_is_iovec(i)) {
402			size_t count = min(size, iov_iter_count(i));
407			for (p = i->iov, skip = i->iov_offset; count; p++, skip = 0) {
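Both fault-in helpers return the number of bytes that could NOT be faulted in (0 means full success). Filesystems use them to break the deadlock between page faults and page/fs locks: copy with faults disabled, and on a short copy drop locks, fault in explicitly, retry. A hedged sketch of that retry shape, modeled on buffered-write paths; do_locked_copy() is a hypothetical stand-in:

#include <linux/uio.h>

/* Hypothetical helper: copies with page faults disabled, advances
 * 'from' by what it copied, and may return a short count. */
extern ssize_t do_locked_copy(struct iov_iter *from, size_t bytes);

static ssize_t copy_with_faultin(struct iov_iter *from)
{
	ssize_t copied = 0;

	while (iov_iter_count(from)) {
		size_t want = iov_iter_count(from);
		ssize_t n = do_locked_copy(from, want);

		if (n < 0)
			return copied ? copied : n;
		copied += n;
		if ((size_t)n == want)
			break;
		/* Short copy: the user page was not resident.  Fault it
		 * in with no locks held and retry; if nothing could be
		 * faulted in, the buffer is genuinely bad. */
		if (fault_in_iov_iter_readable(from, want - n) == want - n)
			return copied ? copied : -EFAULT;
	}
	return copied;
}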
424	void iov_iter_init(struct iov_iter *i, unsigned int direction,
429		*i = (struct iov_iter) {
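Most callers never call iov_iter_init() directly for user memory; they arrive via import_iovec() or import_single_range(), which validate the addresses first. A short sketch of wrapping a single user buffer (READ meaning the user buffer is the destination, per this kernel generation's convention):

#include <linux/uio.h>

static int make_user_iter(void __user *buf, size_t len,
			  struct iovec *iov, struct iov_iter *iter)
{
	/* Validates access and initializes a one-segment iterator.
	 * For the single-buffer case iov_iter_ubuf() is the lighter
	 * ITER_UBUF alternative. */
	return import_single_range(READ, buf, len, iov, iter);
}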
443	static inline unsigned int pipe_npages(const struct iov_iter *i, int *npages)
445		struct pipe_inode_info *pipe = i->pipe;
447		int off = i->last_offset;

459			struct iov_iter *i)	/* in copy_pipe_to_iter() */
463		if (unlikely(bytes > i->count))
464			bytes = i->count;
468		if (!sanity(i))
472			struct page *page = append_pipe(i, n, &off);

490			struct iov_iter *i, __wsum *sump)	/* in csum_and_copy_to_pipe_iter() */
496		if (unlikely(bytes > i->count))
497			bytes = i->count;
501		if (!sanity(i))
505			struct page *page = append_pipe(i, bytes, &r);

521	size_t _copy_to_iter(const void *addr, size_t bytes, struct iov_iter *i)
523		if (unlikely(iov_iter_is_pipe(i)))
524			return copy_pipe_to_iter(addr, bytes, i);
525		if (user_backed_iter(i))
527		iterate_and_advance(i, bytes, base, len, off,
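_copy_to_iter() is the engine behind copy_to_iter(), which drivers call from ->read_iter() to push kernel data at whatever the caller supplied (iovec, pipe, kvec, ...). A hedged sketch of such a handler; dev_buf/dev_len are hypothetical driver state:

#include <linux/fs.h>
#include <linux/uio.h>

/* Hypothetical backing store for the sketch. */
static char dev_buf[4096];
static const size_t dev_len = sizeof(dev_buf);

static ssize_t sample_read_iter(struct kiocb *iocb, struct iov_iter *to)
{
	loff_t pos = iocb->ki_pos;
	size_t n;

	if (!iov_iter_count(to))
		return 0;
	if (pos < 0 || pos >= dev_len)
		return 0;
	/* copy_to_iter() returns bytes copied and advances 'to';
	 * a short count means part of the destination faulted. */
	n = copy_to_iter(dev_buf + pos, dev_len - pos, to);
	iocb->ki_pos += n;
	return n ? n : -EFAULT;
}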
547			struct iov_iter *i)	/* in copy_mc_pipe_to_iter() */
552		if (unlikely(bytes > i->count))
553			bytes = i->count;
557		if (!sanity(i))
561			struct page *page = append_pipe(i, bytes, &off);
575				iov_iter_revert(i, rem);

607	size_t _copy_mc_to_iter(const void *addr, size_t bytes, struct iov_iter *i)
609		if (unlikely(iov_iter_is_pipe(i)))
610			return copy_mc_pipe_to_iter(addr, bytes, i);
611		if (user_backed_iter(i))
613		__iterate_and_advance(i, bytes, base, len, off,

623	size_t _copy_from_iter(void *addr, size_t bytes, struct iov_iter *i)
625		if (unlikely(iov_iter_is_pipe(i))) {
629		if (user_backed_iter(i))
631		iterate_and_advance(i, bytes, base, len, off,
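The usual consumer-side pattern wraps _copy_from_iter() via copy_from_iter_full(), which has all-or-nothing semantics: it either copies everything and advances, or reverts any partial copy and reports failure. A small sketch; struct sample_hdr is an illustrative type, not a kernel one:

#include <linux/uio.h>

struct sample_hdr {		/* illustrative wire header */
	__le32 magic;
	__le32 len;
};

static int pull_header(struct iov_iter *from, struct sample_hdr *hdr)
{
	/* copy_from_iter_full() either copies the whole header and
	 * advances 'from', or copies nothing (reverting any partial
	 * progress) and returns false. */
	if (!copy_from_iter_full(hdr, sizeof(*hdr), from))
		return -EFAULT;
	return 0;
}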
640	size_t _copy_from_iter_nocache(void *addr, size_t bytes, struct iov_iter *i)
642		if (unlikely(iov_iter_is_pipe(i))) {
646		iterate_and_advance(i, bytes, base, len, off,

672	size_t _copy_from_iter_flushcache(void *addr, size_t bytes, struct iov_iter *i)
674		if (unlikely(iov_iter_is_pipe(i))) {
678		iterate_and_advance(i, bytes, base, len, off,

713			struct iov_iter *i)	/* in copy_page_to_iter() */
718		if (unlikely(iov_iter_is_pipe(i)))
719			return copy_page_to_iter_pipe(page, offset, bytes, i);
725			n = _copy_to_iter(kaddr + offset, n, i);

742			struct iov_iter *i)	/* in copy_page_from_iter() */
752			n = _copy_from_iter(kaddr + offset, n, i);
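copy_page_to_iter() is the page-granular form buffered-read paths use: it takes care of highmem kmapping, and for a pipe-backed iterator it can splice the page in by reference instead of copying. A minimal sketch of handing a page-cache page's bytes to the caller:

#include <linux/uio.h>
#include <linux/mm.h>

static size_t give_page(struct page *page, size_t offset, size_t len,
			struct iov_iter *to)
{
	/* Copies (or, for pipe targets, splices by reference) up to
	 * 'len' bytes starting at 'offset' within the page; returns
	 * the number of bytes the destination actually took. */
	return copy_page_to_iter(page, offset, len, to);
}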
768	static size_t pipe_zero(size_t bytes, struct iov_iter *i)
772		if (unlikely(bytes > i->count))
773			bytes = i->count;
777		if (!sanity(i))
781			struct page *page = append_pipe(i, n, &off);

794	size_t iov_iter_zero(size_t bytes, struct iov_iter *i)
796		if (unlikely(iov_iter_is_pipe(i)))
797			return pipe_zero(bytes, i);
798		iterate_and_advance(i, bytes, base, len, count,
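iov_iter_zero() is what read paths reach for when there is no source page at all, e.g. a hole in a sparse file. A trivial sketch:

#include <linux/uio.h>

static size_t read_hole(struct iov_iter *to, size_t len)
{
	/* Returns the number of bytes actually zeroed and advances
	 * 'to'; for a pipe destination this appends freshly
	 * allocated zero-filled pages rather than writing through. */
	return iov_iter_zero(len, to);
}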
808			struct iov_iter *i)	/* in copy_page_from_iter_atomic() */
815		if (unlikely(iov_iter_is_pipe(i) || iov_iter_is_discard(i))) {
820		iterate_and_advance(i, bytes, base, len, off,

829	static void pipe_advance(struct iov_iter *i, size_t size)
831		struct pipe_inode_info *pipe = i->pipe;
832		int off = i->last_offset;
835			pipe_discard_from(pipe, i->start_head);	// discard everything
838		i->count -= size;
840			struct pipe_buffer *buf = pipe_buf(pipe, i->head);
845				i->last_offset = last_offset(buf);
849			i->head++;
852		pipe_discard_from(pipe, i->head + 1);	// discard everything past this one

855	static void iov_iter_bvec_advance(struct iov_iter *i, size_t size)
859		if (!i->count)
861		i->count -= size;
863		size += i->iov_offset;
865		for (bvec = i->bvec, end = bvec + i->nr_segs; bvec < end; bvec++) {
870		i->iov_offset = size;
871		i->nr_segs -= bvec - i->bvec;
872		i->bvec = bvec;

875	static void iov_iter_iovec_advance(struct iov_iter *i, size_t size)
879		if (!i->count)
881		i->count -= size;
883		size += i->iov_offset;	// from beginning of current segment
884		for (iov = i->iov, end = iov + i->nr_segs; iov < end; iov++) {
889		i->iov_offset = size;
890		i->nr_segs -= iov - i->iov;
891		i->iov = iov;

894	void iov_iter_advance(struct iov_iter *i, size_t size)
896		if (unlikely(i->count < size))
897			size = i->count;
898		if (likely(iter_is_ubuf(i)) || unlikely(iov_iter_is_xarray(i))) {
899			i->iov_offset += size;
900			i->count -= size;
901		} else if (likely(iter_is_iovec(i) || iov_iter_is_kvec(i))) {
903			iov_iter_iovec_advance(i, size);
904		} else if (iov_iter_is_bvec(i)) {
905			iov_iter_bvec_advance(i, size);
906		} else if (iov_iter_is_pipe(i)) {
907			pipe_advance(i, size);
908		} else if (iov_iter_is_discard(i)) {
909			i->count -= size;
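Note the clamp at the top of iov_iter_advance(): advancing can never move past the remaining count, so callers may pass an optimistic size. A minimal sketch of skipping a prefix the caller asked to ignore:

#include <linux/uio.h>

static size_t skip_prefix(struct iov_iter *i, size_t prefix)
{
	iov_iter_advance(i, prefix);	/* clamped to what remains */
	return iov_iter_count(i);	/* bytes still to transfer */
}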
914	void iov_iter_revert(struct iov_iter *i, size_t unroll)
920		i->count += unroll;
921		if (unlikely(iov_iter_is_pipe(i))) {
922			struct pipe_inode_info *pipe = i->pipe;
925			while (head > i->start_head) {
929					i->last_offset = last_offset(b);
930					i->head = head;
937			i->last_offset = 0;
938			i->head = head;
941		if (unlikely(iov_iter_is_discard(i)))
943		if (unroll <= i->iov_offset) {
944			i->iov_offset -= unroll;
947		unroll -= i->iov_offset;
948		if (iov_iter_is_xarray(i) || iter_is_ubuf(i)) {
953		} else if (iov_iter_is_bvec(i)) {
954			const struct bio_vec *bvec = i->bvec;
957				i->nr_segs++;
959					i->bvec = bvec;
960					i->iov_offset = n - unroll;
966			const struct iovec *iov = i->iov;
969				i->nr_segs++;
971					i->iov = iov;
972					i->iov_offset = n - unroll;
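iov_iter_revert() exists for the classic short-transfer pattern: consume from the iterator optimistically, then give back whatever the device or lower layer did not accept. A hedged sketch; queue_tx() is a hypothetical helper returning bytes actually sent:

#include <linux/uio.h>

/* Hypothetical transmit helper: returns bytes queued or -errno. */
extern ssize_t queue_tx(const void *buf, size_t len);

static ssize_t send_some(struct iov_iter *from, void *buf, size_t max)
{
	size_t pulled = copy_from_iter(buf, max, from);
	ssize_t sent = queue_tx(buf, pulled);

	if (sent < 0) {
		iov_iter_revert(from, pulled);	/* nothing consumed */
		return sent;
	}
	if ((size_t)sent < pulled)
		iov_iter_revert(from, pulled - sent);	/* partial send */
	return sent;
}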
984	size_t iov_iter_single_seg_count(const struct iov_iter *i)
986		if (i->nr_segs > 1) {
987			if (likely(iter_is_iovec(i) || iov_iter_is_kvec(i)))
988				return min(i->count, i->iov->iov_len - i->iov_offset);
989			if (iov_iter_is_bvec(i))
990				return min(i->count, i->bvec->bv_len - i->iov_offset);
992		return i->count;

996	void iov_iter_kvec(struct iov_iter *i, unsigned int direction,
1001		*i = (struct iov_iter){

1012	void iov_iter_bvec(struct iov_iter *i, unsigned int direction,
1017		*i = (struct iov_iter){

1028	void iov_iter_pipe(struct iov_iter *i, unsigned int direction,
1034		*i = (struct iov_iter){

1059	void iov_iter_xarray(struct iov_iter *i, unsigned int direction,
1063		*i = (struct iov_iter) {

1083	void iov_iter_discard(struct iov_iter *i, unsigned int direction, size_t count)
1086		*i = (struct iov_iter){
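The constructors above cover the kernel-backed flavours: a kvec iter walks plain kernel virtual memory, while a bvec iter walks (page, offset, len) triples, so it works for highmem pages and never touches a user address space. A brief sketch of building both (READ = the buffers receive data, WRITE = they are the source, in this kernel generation):

#include <linux/uio.h>
#include <linux/mm.h>

static void build_kernel_iters(void *kbuf, size_t len, struct page *page)
{
	struct kvec kv = { .iov_base = kbuf, .iov_len = len };
	struct bio_vec bv = {
		.bv_page = page, .bv_offset = 0, .bv_len = PAGE_SIZE,
	};
	struct iov_iter ki, bi;

	iov_iter_kvec(&ki, READ, &kv, 1, len);		/* data lands in kbuf */
	iov_iter_bvec(&bi, WRITE, &bv, 1, PAGE_SIZE);	/* page is the source */
}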
1095	static bool iov_iter_aligned_iovec(const struct iov_iter *i, unsigned addr_mask,
1098		size_t size = i->count;
1099		size_t skip = i->iov_offset;
1102		for (k = 0; k < i->nr_segs; k++, skip = 0) {
1103			size_t len = i->iov[k].iov_len - skip;
1109			if ((unsigned long)(i->iov[k].iov_base + skip) & addr_mask)

1119	static bool iov_iter_aligned_bvec(const struct iov_iter *i, unsigned addr_mask,
1122		size_t size = i->count;
1123		unsigned skip = i->iov_offset;
1126		for (k = 0; k < i->nr_segs; k++, skip = 0) {
1127			size_t len = i->bvec[k].bv_len - skip;
1133			if ((unsigned long)(i->bvec[k].bv_offset + skip) & addr_mask)

1153	bool iov_iter_is_aligned(const struct iov_iter *i, unsigned addr_mask,
1156		if (likely(iter_is_ubuf(i))) {
1157			if (i->count & len_mask)
1159			if ((unsigned long)(i->ubuf + i->iov_offset) & addr_mask)
1164		if (likely(iter_is_iovec(i) || iov_iter_is_kvec(i)))
1165			return iov_iter_aligned_iovec(i, addr_mask, len_mask);
1167		if (iov_iter_is_bvec(i))
1168			return iov_iter_aligned_bvec(i, addr_mask, len_mask);
1170		if (iov_iter_is_pipe(i)) {
1171			size_t size = i->count;
1175			if (size && i->last_offset > 0) {
1176				if (i->last_offset & addr_mask)
1183		if (iov_iter_is_xarray(i)) {
1184			if (i->count & len_mask)
1186			if ((i->xarray_start + i->iov_offset) & addr_mask)

1194	static unsigned long iov_iter_alignment_iovec(const struct iov_iter *i)
1197		size_t size = i->count;
1198		size_t skip = i->iov_offset;
1201		for (k = 0; k < i->nr_segs; k++, skip = 0) {
1202			size_t len = i->iov[k].iov_len - skip;
1204				res |= (unsigned long)i->iov[k].iov_base + skip;

1216	static unsigned long iov_iter_alignment_bvec(const struct iov_iter *i)
1219		size_t size = i->count;
1220		unsigned skip = i->iov_offset;
1223		for (k = 0; k < i->nr_segs; k++, skip = 0) {
1224			size_t len = i->bvec[k].bv_len - skip;
1225			res |= (unsigned long)i->bvec[k].bv_offset + skip;

1236	unsigned long iov_iter_alignment(const struct iov_iter *i)
1238		if (likely(iter_is_ubuf(i))) {
1239			size_t size = i->count;
1241				return ((unsigned long)i->ubuf + i->iov_offset) | size;
1246		if (likely(iter_is_iovec(i) || iov_iter_is_kvec(i)))
1247			return iov_iter_alignment_iovec(i);
1249		if (iov_iter_is_bvec(i))
1250			return iov_iter_alignment_bvec(i);
1252		if (iov_iter_is_pipe(i)) {
1253			size_t size = i->count;
1255			if (size && i->last_offset > 0)
1256				return size | i->last_offset;
1260		if (iov_iter_is_xarray(i))
1261			return (i->xarray_start + i->iov_offset) | i->count;
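iov_iter_alignment() ORs together every segment's address and length, so a single misaligned segment taints the result; iov_iter_is_aligned() is the mask form of the same check. Direct-I/O style gates use them like the following sketch (blocksize assumed to be a power of two):

#include <linux/uio.h>

static bool dio_ok(struct iov_iter *iter, unsigned int blocksize)
{
	unsigned int mask = blocksize - 1;

	/* Any low bit set in the combined address|length word means
	 * some segment is not blocksize-aligned. */
	return !(iov_iter_alignment(iter) & mask);
	/* equivalently: iov_iter_is_aligned(iter, mask, mask) */
}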
1267	unsigned long iov_iter_gap_alignment(const struct iov_iter *i)
1271		size_t size = i->count;
1274		if (iter_is_ubuf(i))
1277		if (WARN_ON(!iter_is_iovec(i)))
1280		for (k = 0; k < i->nr_segs; k++) {
1281			if (i->iov[k].iov_len) {
1282				unsigned long base = (unsigned long)i->iov[k].iov_base;
1285				v = base + i->iov[k].iov_len;
1286				if (size <= i->iov[k].iov_len)
1288				size -= i->iov[k].iov_len;
1311	static ssize_t pipe_get_pages(struct iov_iter *i,
1319		if (!sanity(i))
1322		*start = off = pipe_npages(i, &npages);
1330			struct page *page = append_pipe(i, left, &off);

1368	static ssize_t iter_xarray_get_pages(struct iov_iter *i,
1376		pos = i->xarray_start + i->iov_offset;
1384		nr = iter_xarray_populate_pages(*pages, i->xarray, index, count);
1389		i->iov_offset += maxsize;
1390		i->count -= maxsize;

1395	static unsigned long first_iovec_segment(const struct iov_iter *i, size_t *size)
1400		if (iter_is_ubuf(i))
1401			return (unsigned long)i->ubuf + i->iov_offset;
1403		for (k = 0, skip = i->iov_offset; k < i->nr_segs; k++, skip = 0) {
1404			size_t len = i->iov[k].iov_len - skip;
1410			return (unsigned long)i->iov[k].iov_base + skip;

1416	static struct page *first_bvec_segment(const struct iov_iter *i,
1420		size_t skip = i->iov_offset, len;
1422		len = i->bvec->bv_len - skip;
1425		skip += i->bvec->bv_offset;
1426		page = i->bvec->bv_page + skip / PAGE_SIZE;

1431	static ssize_t __iov_iter_get_pages_alloc(struct iov_iter *i,
1437		if (maxsize > i->count)
1438			maxsize = i->count;
1444		if (likely(user_backed_iter(i))) {
1449			if (iov_iter_rw(i) != WRITE)
1451			if (i->nofault)
1454			addr = first_iovec_segment(i, &maxsize);
1464			iov_iter_advance(i, maxsize);
1467		if (iov_iter_is_bvec(i)) {
1471			page = first_bvec_segment(i, &maxsize, start);
1479			i->count -= maxsize;
1480			i->iov_offset += maxsize;
1481			if (i->iov_offset == i->bvec->bv_len) {
1482				i->iov_offset = 0;
1483				i->bvec++;
1484				i->nr_segs--;
1488		if (iov_iter_is_pipe(i))
1489			return pipe_get_pages(i, pages, maxsize, maxpages, start);
1490		if (iov_iter_is_xarray(i))
1491			return iter_xarray_get_pages(i, pages, maxsize, maxpages, start);
1495	ssize_t iov_iter_get_pages2(struct iov_iter *i,
1503		return __iov_iter_get_pages_alloc(i, &pages, maxsize, maxpages, start);

1507	ssize_t iov_iter_get_pages_alloc2(struct iov_iter *i,
1515		len = __iov_iter_get_pages_alloc(i, pages, maxsize, ~0U, start);
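The "2" variants advance the iterator themselves. A hedged sketch of pinning the pages behind the next chunk of a user-backed iterator; pin_next() is an illustrative name:

#include <linux/uio.h>
#include <linux/mm.h>

static ssize_t pin_next(struct iov_iter *i, struct page **pages,
			unsigned int maxpages)
{
	size_t start;
	ssize_t bytes = iov_iter_get_pages2(i, pages, SIZE_MAX,
					    maxpages, &start);

	if (bytes <= 0)
		return bytes;
	/* pages[0 .. DIV_ROUND_UP(start + bytes, PAGE_SIZE) - 1] are
	 * now pinned, *start is the offset into pages[0], and the
	 * iterator has been advanced past 'bytes'.  Drop each
	 * reference with put_page() once the I/O completes. */
	return bytes;
}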
1525			struct iov_iter *i)	/* in csum_and_copy_from_iter() */
1529		if (unlikely(iov_iter_is_pipe(i) || iov_iter_is_discard(i))) {
1533		iterate_and_advance(i, bytes, base, len, off, ({

1547			struct iov_iter *i)	/* in csum_and_copy_to_iter() */
1552		if (unlikely(iov_iter_is_discard(i))) {
1558		if (unlikely(iov_iter_is_pipe(i)))
1559			bytes = csum_and_copy_to_pipe_iter(addr, bytes, i, &sum);
1560		else iterate_and_advance(i, bytes, base, len, off, ({

1575			struct iov_iter *i)	/* in hash_and_copy_to_iter() */
1582		copied = copy_to_iter(addr, bytes, i);
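The checksum variants fold the copy and the Internet-checksum pass into one traversal, which is why networking uses them on the datagram paths. A hedged sketch, assuming the csum_and_copy_from_iter(addr, bytes, &csum, i) signature of this kernel generation:

#include <linux/uio.h>

static bool pull_and_checksum(void *dst, size_t len, struct iov_iter *msg)
{
	__wsum csum = 0;	/* seed; callers may fold into a larger sum */

	/* Returns bytes copied while summing; short means a fault. */
	return csum_and_copy_from_iter(dst, len, &csum, msg) == len;
}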
1593	static int iov_npages(const struct iov_iter *i, int maxpages)
1595		size_t skip = i->iov_offset, size = i->count;
1599		for (p = i->iov; size; skip = 0, p++) {

1613	static int bvec_npages(const struct iov_iter *i, int maxpages)
1615		size_t skip = i->iov_offset, size = i->count;
1619		for (p = i->bvec; size; skip = 0, p++) {

1631	int iov_iter_npages(const struct iov_iter *i, int maxpages)
1633		if (unlikely(!i->count))
1635		if (likely(iter_is_ubuf(i))) {
1636			unsigned offs = offset_in_page(i->ubuf + i->iov_offset);
1637			int npages = DIV_ROUND_UP(offs + i->count, PAGE_SIZE);
1641		if (likely(iter_is_iovec(i) || iov_iter_is_kvec(i)))
1642			return iov_npages(i, maxpages);
1643		if (iov_iter_is_bvec(i))
1644			return bvec_npages(i, maxpages);
1645		if (iov_iter_is_pipe(i)) {
1648			if (!sanity(i))
1651			pipe_npages(i, &npages);
1654		if (iov_iter_is_xarray(i)) {
1655			unsigned offset = (i->xarray_start + i->iov_offset) % PAGE_SIZE;
1656			int npages = DIV_ROUND_UP(offset + i->count, PAGE_SIZE);
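iov_iter_npages() answers "how many pages would this iterator touch", clamped to the caller's budget, so it is the natural way to size a page-pointer array (or a bio) before pinning. A small sketch; alloc_page_array() is an illustrative name:

#include <linux/uio.h>
#include <linux/slab.h>

static struct page **alloc_page_array(const struct iov_iter *i,
				      int cap, int *npages)
{
	/* Result is clamped to 'cap', so the allocation stays bounded
	 * even for a huge iterator. */
	*npages = iov_iter_npages(i, cap);
	return kcalloc(*npages, sizeof(struct page *), GFP_KERNEL);
}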
1688		int ret = -EFAULT, i;	/* here 'i' is a local index in copy_compat_iovec_from_user() */
1693		for (i = 0; i < nr_segs; i++) {
1697			unsafe_get_user(len, &uiov[i].iov_len, uaccess_end);
1698			unsafe_get_user(buf, &uiov[i].iov_base, uaccess_end);
1705			iov[i].iov_base = compat_ptr(buf);
1706			iov[i].iov_len = len;

1767			struct iov_iter *i, bool compat)	/* in __import_iovec() */
1804		iov_iter_init(i, type, iov, nr_segs, total_len);

1836			struct iovec **iovp, struct iov_iter *i)	/* in import_iovec() */
1838		return __import_iovec(type, uvec, nr_segs, fast_segs, iovp, i,

1844			struct iovec *iov, struct iov_iter *i)	/* in import_single_range() */
1853		iov_iter_init(i, rw, iov, 1, len);
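import_iovec() is the readv(2)/writev(2) entry pattern: a small on-stack array serves the common case, heap allocation kicks in only past fast_segs, and ownership comes back through the iov pointer (set to NULL when the stack array was used, so the trailing kfree() is always safe). A sketch of the canonical caller shape:

#include <linux/uio.h>
#include <linux/slab.h>

static ssize_t do_readv_like(const struct iovec __user *uvec,
			     unsigned long nr_segs)
{
	struct iovec iovstack[UIO_FASTIOV];
	struct iovec *iov = iovstack;
	struct iov_iter iter;
	ssize_t ret;

	ret = import_iovec(READ, uvec, nr_segs, ARRAY_SIZE(iovstack),
			   &iov, &iter);
	if (ret < 0)
		return ret;
	/* ... hand &iter to the actual transfer here ... */
	kfree(iov);	/* NULL if the stack array was used */
	return ret;
}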
1870	void iov_iter_restore(struct iov_iter *i, struct iov_iter_state *state)
1872		if (WARN_ON_ONCE(!iov_iter_is_bvec(i) && !iter_is_iovec(i)) &&
1873		    !iov_iter_is_kvec(i) && !iter_is_ubuf(i))
1875		i->iov_offset = state->iov_offset;
1876		i->count = state->count;
1877		if (iter_is_ubuf(i))
1889		if (iov_iter_is_bvec(i))
1890			i->bvec -= state->nr_segs - i->nr_segs;
1892			i->iov -= state->nr_segs - i->nr_segs;
1893		i->nr_segs = state->nr_segs;
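iov_iter_state stores only iov_offset, count, and nr_segs, which is why iov_iter_restore() walks the segment pointer back from the nr_segs delta instead of saving it. The snapshot-and-retry usage, as io_uring does around -EAGAIN, looks roughly like this sketch:

#include <linux/fs.h>
#include <linux/uio.h>

static ssize_t issue_with_retry(struct kiocb *iocb, struct iov_iter *i)
{
	struct iov_iter_state state;
	ssize_t ret;

	iov_iter_save_state(i, &state);
	ret = call_read_iter(iocb->ki_filp, iocb, i);
	if (ret == -EAGAIN)
		iov_iter_restore(i, &state);	/* rewind for re-issue */
	return ret;
}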