Lines matching refs: vmf (local struct vm_fault instances in mm/memory.c)
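For context, this kernel generation's struct vm_fault carries exactly the fields populated in the lines below. A paraphrased sketch of the definition follows (field order and comments are approximate; see include/linux/mm.h of this era for the authoritative version):

struct vm_fault {
        unsigned int flags;             /* FAULT_FLAG_xxx flags */
        pgoff_t pgoff;                  /* logical page offset based on vma */
        void __user *virtual_address;   /* faulting virtual address */
        struct page *page;              /* set by ->fault handlers unless
                                           VM_FAULT_NOPAGE is returned */

        /* for ->map_pages() only */
        pgoff_t max_pgoff;              /* map pages for pgoff..max_pgoff */
        pte_t *pte;                     /* pte entry for ->pgoff */
};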
1985 struct vm_fault vmf; in do_page_mkwrite() local
1988 vmf.virtual_address = (void __user *)(address & PAGE_MASK); in do_page_mkwrite()
1989 vmf.pgoff = page->index; in do_page_mkwrite()
1990 vmf.flags = FAULT_FLAG_WRITE|FAULT_FLAG_MKWRITE; in do_page_mkwrite()
1991 vmf.page = page; in do_page_mkwrite()
1993 ret = vma->vm_ops->page_mkwrite(vma, &vmf); in do_page_mkwrite()
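do_page_mkwrite() above builds a write/mkwrite vm_fault and hands it to the vma's ->page_mkwrite() callback, which a filesystem supplies to prepare a shared page for writing. A minimal sketch of such a handler for this two-argument interface; example_page_mkwrite and its bare truncation check are illustrative only (real filesystems typically also reserve blocks or start a journal transaction here):

#include <linux/fs.h>
#include <linux/mm.h>
#include <linux/pagemap.h>

static int example_page_mkwrite(struct vm_area_struct *vma, struct vm_fault *vmf)
{
        struct page *page = vmf->page;
        struct inode *inode = file_inode(vma->vm_file);

        lock_page(page);
        /* The page may have been truncated while mmap_sem was dropped. */
        if (page->mapping != inode->i_mapping) {
                unlock_page(page);
                return VM_FAULT_NOPAGE;
        }
        /*
         * VM_FAULT_LOCKED tells do_page_mkwrite() the page is already
         * locked, so it skips its own lock_page()/mapping recheck.
         */
        return VM_FAULT_LOCKED;
}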
2666 struct vm_fault vmf; in __do_fault() local
2669 vmf.virtual_address = (void __user *)(address & PAGE_MASK); in __do_fault()
2670 vmf.pgoff = pgoff; in __do_fault()
2671 vmf.flags = flags; in __do_fault()
2672 vmf.page = NULL; in __do_fault()
2674 ret = vma->vm_ops->fault(vma, &vmf); in __do_fault()
2678 if (unlikely(PageHWPoison(vmf.page))) { in __do_fault()
2680 unlock_page(vmf.page); in __do_fault()
2681 page_cache_release(vmf.page); in __do_fault()
2686 lock_page(vmf.page); in __do_fault()
2688 VM_BUG_ON_PAGE(!PageLocked(vmf.page), vmf.page); in __do_fault()
2690 *page = vmf.page; in __do_fault()
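__do_fault() above fills in the fault address, offset and flags, invokes the vma's ->fault() callback, and expects a referenced page back in vmf.page; if the handler did not return VM_FAULT_LOCKED, __do_fault() locks the page itself (lines 2686–2688). A sketch of a driver-style ->fault() handler for this two-argument interface; struct example_dev and its pre-allocated page array are hypothetical:

#include <linux/mm.h>

struct example_dev {                    /* hypothetical driver state */
        struct page **pages;            /* pre-allocated backing pages */
        unsigned long nr_pages;
};

static int example_fault(struct vm_area_struct *vma, struct vm_fault *vmf)
{
        struct example_dev *dev = vma->vm_private_data;

        if (vmf->pgoff >= dev->nr_pages)
                return VM_FAULT_SIGBUS;

        /*
         * Hand back a referenced page; because VM_FAULT_LOCKED is not
         * returned, __do_fault() will lock the page itself.
         */
        get_page(dev->pages[vmf->pgoff]);
        vmf->page = dev->pages[vmf->pgoff];
        return 0;
}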
2802 struct vm_fault vmf; in do_fault_around() local
2832 vmf.virtual_address = (void __user *) start_addr; in do_fault_around()
2833 vmf.pte = pte; in do_fault_around()
2834 vmf.pgoff = pgoff; in do_fault_around()
2835 vmf.max_pgoff = max_pgoff; in do_fault_around()
2836 vmf.flags = flags; in do_fault_around()
2837 vma->vm_ops->map_pages(vma, &vmf); in do_fault_around()
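do_fault_around() above reuses the same vm_fault, adding vmf.pte and vmf.max_pgoff so the vma's ->map_pages() callback can pre-populate ptes for pages around the faulting offset; it is only entered when ->map_pages is non-NULL. A sketch of how a file-backed mapping wires up all three callbacks seen in this listing; example_file_vm_ops and example_mmap are illustrative names, while filemap_fault() and filemap_map_pages() are the stock page-cache implementations of this era (many filesystems use filemap_page_mkwrite() or a block-layer helper rather than the example_page_mkwrite sketch above):

#include <linux/fs.h>
#include <linux/mm.h>

static const struct vm_operations_struct example_file_vm_ops = {
        .fault          = filemap_fault,        /* called from __do_fault() */
        .map_pages      = filemap_map_pages,    /* called from do_fault_around() */
        .page_mkwrite   = example_page_mkwrite, /* called from do_page_mkwrite() */
};

static int example_mmap(struct file *file, struct vm_area_struct *vma)
{
        file_accessed(file);
        vma->vm_ops = &example_file_vm_ops;
        return 0;
}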