Lines matching references to the identifier start in mm/gup.c (get_user_pages and friends). The left-hand number on each entry is the source line; entries ending in "argument" are lines where start is declared as a function parameter.
478 unsigned long start, unsigned long nr_pages, in __get_user_pages() argument
505 if (!vma || start >= vma->vm_end) { in __get_user_pages()
506 vma = find_extend_vma(mm, start); in __get_user_pages()
507 if (!vma && in_gate_area(mm, start)) { in __get_user_pages()
509 ret = get_gate_page(mm, start & PAGE_MASK, in __get_user_pages()
524 &start, &nr_pages, i, in __get_user_pages()
541 page = follow_page_mask(vma, start, foll_flags, &page_mask); in __get_user_pages()
544 ret = faultin_page(tsk, vma, start, &foll_flags, in __get_user_pages()
570 flush_anon_page(vma, page, start); in __get_user_pages()
579 page_increm = 1 + (~(start >> PAGE_SHIFT) & page_mask); in __get_user_pages()
583 start += page_increm * PAGE_SIZE; in __get_user_pages()
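
The page_increm computation at line 579 is how the loop steps over compound
(huge) pages in one go: follow_page_mask() returns page_mask as the number of
base pages in the mapping minus one (0 for a normal page, 511 for a 2 MiB
hugepage with 4 KiB base pages), so 1 + (~(start >> PAGE_SHIFT) & page_mask)
counts the pages from start to the end of that compound page, and line 583
then advances start by that many pages. A minimal userspace sketch of the
arithmetic, assuming PAGE_SHIFT is 12 (4 KiB base pages):

    #include <stdio.h>

    #define PAGE_SHIFT 12   /* assumed: 4 KiB base pages */

    /* Pages remaining up to the end of the current compound page.
     * page_mask is (base pages per compound page) - 1. */
    static unsigned long page_increm(unsigned long start, unsigned long page_mask)
    {
            return 1 + (~(start >> PAGE_SHIFT) & page_mask);
    }

    int main(void)
    {
            printf("%lu\n", page_increm(0x1000, 0));     /* base page: 1 */
            printf("%lu\n", page_increm(0x200000, 511)); /* hugepage head: 512 */
            printf("%lu\n", page_increm(0x201000, 511)); /* one page in: 511 */
            return 0;
    }
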
653 unsigned long start, in __get_user_pages_locked() argument
676 ret = __get_user_pages(tsk, mm, start, nr_pages, flags, pages, in __get_user_pages_locked()
706 start += ret << PAGE_SHIFT; in __get_user_pages_locked()
716 ret = __get_user_pages(tsk, mm, start, 1, flags | FOLL_TRIED, in __get_user_pages_locked()
729 start += PAGE_SIZE; in __get_user_pages_locked()
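
Lines 676-729 are the retry structure of __get_user_pages_locked(): the first
__get_user_pages() pass may drop mmap_sem to service a fault, after which the
helper re-takes the lock and retries the remainder one page at a time with
FOLL_TRIED, advancing start by ret << PAGE_SHIFT for the pages already pinned
(line 706) or by PAGE_SIZE for each single retried page (line 729). A sketch
of that control flow, with the locking elided and hypothetical helpers
gup_range() and gup_one_tried() standing in for the two __get_user_pages()
calls:

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)

    struct page;

    long gup_range(unsigned long start, unsigned long n, struct page **p);
    long gup_one_tried(unsigned long start, struct page **p);

    long gup_locked_sketch(unsigned long start, unsigned long nr_pages,
                           struct page **pages)
    {
            long pinned = 0;

            while (nr_pages) {
                    long ret = gup_range(start, nr_pages, pages + pinned);
                    if (ret > 0) {
                            pinned   += ret;
                            nr_pages -= ret;
                            start    += ret << PAGE_SHIFT;  /* line 706 */
                    }
                    if (!nr_pages || ret < 0)
                            return pinned ? pinned : ret;
                    /* the lock was dropped: retry one page, FOLL_TRIED */
                    ret = gup_one_tried(start, pages + pinned);
                    if (ret != 1)
                            return pinned ? pinned : ret;
                    pinned++;
                    nr_pages--;
                    start += PAGE_SIZE;                     /* line 729 */
            }
            return pinned;
    }
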
764 unsigned long start, unsigned long nr_pages, in get_user_pages_locked() argument
768 return __get_user_pages_locked(tsk, mm, start, nr_pages, in get_user_pages_locked()
785 unsigned long start, unsigned long nr_pages, in __get_user_pages_unlocked() argument
792 ret = __get_user_pages_locked(tsk, mm, start, nr_pages, pages, NULL, in __get_user_pages_unlocked()
818 unsigned long start, unsigned long nr_pages, in get_user_pages_unlocked() argument
821 return __get_user_pages_unlocked(tsk, mm, start, nr_pages, in get_user_pages_unlocked()
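
get_user_pages_locked() and get_user_pages_unlocked() (lines 764-821, with
their double-underscore variants) are thin wrappers that forward to
__get_user_pages_locked(); the unlocked one takes and releases mmap_sem
itself. A usage sketch, assuming the signature of this kernel generation
(explicit tsk/mm plus write and force flags):

    #include <linux/mm.h>
    #include <linux/sched.h>

    /* Pin nr_pages of a user buffer for writing. May sleep in the
     * fault path, so never call this from atomic context. */
    static long pin_user_buf(unsigned long uaddr, unsigned long nr_pages,
                             struct page **pages)
    {
            return get_user_pages_unlocked(current, current->mm, uaddr,
                                           nr_pages, 1 /* write */,
                                           0 /* force */, pages);
    }
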
882 unsigned long start, unsigned long nr_pages, in get_user_pages() argument
886 return __get_user_pages_locked(tsk, mm, start, nr_pages, in get_user_pages()
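
get_user_pages() itself (line 882) is the classic locked entry point: the
caller holds mmap_sem for reading across the call, and line 886 simply
forwards to __get_user_pages_locked() with no "locked" pointer, so the lock
is never dropped. A hedged sketch of the usual pin/use/release pattern for
this kernel generation (same tsk/mm/write/force signature as above):

    #include <linux/mm.h>
    #include <linux/sched.h>

    static long pin_and_release(unsigned long uaddr, unsigned long nr,
                                struct page **pages)
    {
            long i, got;

            down_read(&current->mm->mmap_sem);
            got = get_user_pages(current, current->mm, uaddr, nr,
                                 1 /* write */, 0 /* force */, pages, NULL);
            up_read(&current->mm->mmap_sem);

            /* ... use the pages ..., then drop the references: */
            for (i = 0; i < got; i++)
                    put_page(pages[i]);
            return got;
    }
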
912 unsigned long start, unsigned long end, int *nonblocking) in populate_vma_page_range() argument
915 unsigned long nr_pages = (end - start) / PAGE_SIZE; in populate_vma_page_range()
918 VM_BUG_ON(start & ~PAGE_MASK); in populate_vma_page_range()
920 VM_BUG_ON_VMA(start < vma->vm_start, vma); in populate_vma_page_range()
947 return __get_user_pages(current, mm, start, nr_pages, gup_flags, in populate_vma_page_range()
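
populate_vma_page_range() (lines 912-947) converts a byte range into a page
count and sanity-checks it before handing the work to __get_user_pages():
line 915 divides the span by PAGE_SIZE, and the VM_BUG_ONs at lines 918-920
insist that start is page aligned and inside the VMA (the kernel checks end
the same way). The same arithmetic as a standalone check, assuming 4 KiB
pages:

    #include <assert.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)
    #define PAGE_MASK  (~(PAGE_SIZE - 1))

    int main(void)
    {
            unsigned long start = 0x10000, end = 0x15000;

            /* the VM_BUG_ON alignment checks, as plain asserts */
            assert((start & ~PAGE_MASK) == 0);
            assert((end & ~PAGE_MASK) == 0);

            /* line 915: byte range -> pages to fault in */
            printf("nr_pages = %lu\n", (end - start) / PAGE_SIZE); /* 5 */
            return 0;
    }
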
958 int __mm_populate(unsigned long start, unsigned long len, int ignore_errors) in __mm_populate() argument
966 end = start + len; in __mm_populate()
968 for (nstart = start; nstart < end; nstart = nend) { in __mm_populate()
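
__mm_populate() (lines 958-968) drives populate_vma_page_range() over an
arbitrary range: end is start + len (line 966), and the for loop at line 968
walks the range one VMA-sized chunk at a time, with nend marking where each
chunk stopped. A sketch of that walk, with hypothetical find_vma_covering()
and populate_range() helpers standing in for the kernel's VMA lookup and
populate_vma_page_range(); error handling, including the ignore_errors flag,
is elided:

    struct vma_like { unsigned long vm_start, vm_end; };

    struct vma_like *find_vma_covering(unsigned long addr);
    long populate_range(struct vma_like *v, unsigned long s, unsigned long e);

    void mm_populate_sketch(unsigned long start, unsigned long len)
    {
            unsigned long nstart, nend, end = start + len;  /* line 966 */

            for (nstart = start; nstart < end; nstart = nend) {
                    struct vma_like *vma = find_vma_covering(nstart);
                    if (!vma)
                            break;
                    /* stop at the end of this VMA or of the request,
                     * whichever comes first */
                    nend = vma->vm_end < end ? vma->vm_end : end;
                    populate_range(vma, nstart, nend);
            }
    }
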
1369 int __get_user_pages_fast(unsigned long start, int nr_pages, int write, in __get_user_pages_fast() argument
1378 start &= PAGE_MASK; in __get_user_pages_fast()
1379 addr = start; in __get_user_pages_fast()
1381 end = start + len; in __get_user_pages_fast()
1384 start, len))) in __get_user_pages_fast()
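
__get_user_pages_fast() (lines 1369-1384) is the interrupts-off lockless
walk. Its prologue is pure address arithmetic: round start down to a page
boundary (line 1378), remember it as the walking cursor addr (line 1379),
compute end (line 1381), and reject the range up front if it is not a valid
user range (line 1384, an access_ok-style check). The arithmetic, assuming
4 KiB pages:

    #include <stdio.h>

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)
    #define PAGE_MASK  (~(PAGE_SIZE - 1))

    int main(void)
    {
            unsigned long start = 0x12345, addr, end, len;
            int nr_pages = 3;

            len   = (unsigned long)nr_pages << PAGE_SHIFT;
            start &= PAGE_MASK;   /* line 1378: round down to a page */
            addr  = start;        /* line 1379: walking cursor */
            end   = start + len;  /* line 1381 */

            printf("addr=%#lx end=%#lx\n", addr, end); /* 0x12000 0x15000 */
            return 0;
    }
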
1452 int get_user_pages_fast(unsigned long start, int nr_pages, int write, in get_user_pages_fast() argument
1458 start &= PAGE_MASK; in get_user_pages_fast()
1459 nr = __get_user_pages_fast(start, nr_pages, write, pages); in get_user_pages_fast()
1464 start += nr << PAGE_SHIFT; in get_user_pages_fast()
1467 ret = get_user_pages_unlocked(current, mm, start, in get_user_pages_fast()
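
get_user_pages_fast() (lines 1452-1467) ties the two paths together: try the
fast walk first, and if it pinned fewer than nr_pages, advance start past the
pages already pinned (nr << PAGE_SHIFT, line 1464) and let the sleeping slow
path handle the rest via get_user_pages_unlocked() (line 1467). A sketch of
that fallback, with hypothetical fast_walk() and slow_gup() standing in for
__get_user_pages_fast() and get_user_pages_unlocked():

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)
    #define PAGE_MASK  (~(PAGE_SIZE - 1))

    struct page;

    int  fast_walk(unsigned long start, int nr, struct page **p);
    long slow_gup(unsigned long start, int nr, struct page **p);

    long gup_fast_sketch(unsigned long start, int nr_pages, struct page **pages)
    {
            int nr;
            long ret;

            start &= PAGE_MASK;                       /* line 1458 */
            nr = fast_walk(start, nr_pages, pages);   /* line 1459 */
            if (nr == nr_pages)
                    return nr;

            /* slow path for what the fast walk could not pin */
            start += (unsigned long)nr << PAGE_SHIFT;         /* line 1464 */
            ret = slow_gup(start, nr_pages - nr, pages + nr); /* line 1467 */
            return ret < 0 ? (nr ? nr : ret) : nr + ret;
    }
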