/kernel/linux/linux-5.10/mm/ |
D | nommu.c |
    104  return vma->vm_end - vma->vm_start;  in kobjsize()
    443  BUG_ON(last->vm_end <= last->vm_start);  in validate_nommu_regions()
    450  BUG_ON(region->vm_end <= region->vm_start);  in validate_nommu_regions()
    452  BUG_ON(region->vm_start < last->vm_top);  in validate_nommu_regions()
    478  if (region->vm_start < pregion->vm_start)  in add_nommu_region()
    480  else if (region->vm_start > pregion->vm_start)  in add_nommu_region()
    531  if (region->vm_top > region->vm_start)  in __put_nommu_region()
    541  free_page_series(region->vm_start, region->vm_top);  in __put_nommu_region()
    594  if (vma->vm_start < pvma->vm_start)  in add_vma_to_mm()
    596  else if (vma->vm_start > pvma->vm_start) {  in add_vma_to_mm()
    [all …]
|
D | mmap.c |
    343  if (vma->vm_start < prev) {  in browse_rb()
    345  vma->vm_start, prev);  in browse_rb()
    348  if (vma->vm_start < pend) {  in browse_rb()
    350  vma->vm_start, pend);  in browse_rb()
    353  if (vma->vm_start > vma->vm_end) {  in browse_rb()
    355  vma->vm_start, vma->vm_end);  in browse_rb()
    368  prev = vma->vm_start;  in browse_rb()
    548  if (vma_tmp->vm_start < end)  in find_vma_links()
    621  max(addr, vma->vm_start)) >> PAGE_SHIFT;  in count_vma_pages_range()
    627  if (vma->vm_start > end)  in count_vma_pages_range()
    [all …]
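
The mmap.c hits come from the red-black tree that keeps every VMA of an address space ordered by vm_start: find_vma_links() descends it comparing vm_start/vm_end to locate an insertion point, and browse_rb() (debug builds) re-checks that ordering. A minimal sketch of such a descent, assuming a simplified helper; insert_vma_by_start() is an invented name, not a kernel symbol, and the real code also tracks gap sizes for get_unmapped_area():

#include <linux/errno.h>
#include <linux/mm_types.h>
#include <linux/rbtree.h>

/*
 * Hypothetical helper: link @vma into @root ordered by vm_start, using the
 * same comparison find_vma_links() performs while walking down the tree.
 * Returns -EEXIST when vma->vm_start already falls inside an existing VMA.
 */
static int insert_vma_by_start(struct rb_root *root, struct vm_area_struct *vma)
{
        struct rb_node **link = &root->rb_node, *parent = NULL;

        while (*link) {
                struct vm_area_struct *cur;

                parent = *link;
                cur = rb_entry(parent, struct vm_area_struct, vm_rb);

                if (vma->vm_start < cur->vm_start)
                        link = &parent->rb_left;        /* new VMA starts below */
                else if (vma->vm_start >= cur->vm_end)
                        link = &parent->rb_right;       /* new VMA starts above */
                else
                        return -EEXIST;                 /* starts inside cur */
        }

        rb_link_node(&vma->vm_rb, parent, link);
        rb_insert_color(&vma->vm_rb, root);
        return 0;
}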
|
D | mlock.c |
    540  pgoff = vma->vm_pgoff + ((start - vma->vm_start) >> PAGE_SHIFT);  in mlock_fixup()
    549  if (start != vma->vm_start) {  in mlock_fixup()
    603  if (!vma || vma->vm_start > start)  in apply_vma_lock_flags()
    607  if (start > vma->vm_start)  in apply_vma_lock_flags()
    629  if (!vma || vma->vm_start != nstart) {  in apply_vma_lock_flags()
    660  if (start + len <= vma->vm_start)  in count_mm_mlocked_page_nr()
    663  if (start > vma->vm_start)  in count_mm_mlocked_page_nr()
    664  count -= (start - vma->vm_start);  in count_mm_mlocked_page_nr()
    666  count += start + len - vma->vm_start;  in count_mm_mlocked_page_nr()
    669  count += vma->vm_end - vma->vm_start;  in count_mm_mlocked_page_nr()
    [all …]
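
mlock_fixup() may have to split a VMA at the start of the range being (un)locked, so it first recomputes the file offset of the split point; the vm_pgoff + ((start - vma->vm_start) >> PAGE_SHIFT) expression above is the canonical arithmetic for that. The same calculation, as a hedged sketch with an invented helper name:

#include <linux/mm.h>

/*
 * Hypothetical helper: file page offset backing @addr, which must lie inside
 * [vma->vm_start, vma->vm_end). vm_pgoff is the page offset of vm_start
 * within the mapped object, so the address delta, converted to pages, is
 * added on top of it.
 */
static pgoff_t addr_to_pgoff(struct vm_area_struct *vma, unsigned long addr)
{
        return vma->vm_pgoff + ((addr - vma->vm_start) >> PAGE_SHIFT);
}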
|
D | mremap.c |
    366  new_pgoff = vma->vm_pgoff + ((old_addr - vma->vm_start) >> PAGE_SHIFT);  in move_vma()
    401  excess = vma->vm_end - vma->vm_start - old_len;  in move_vma()
    402  if (old_addr > vma->vm_start &&  in move_vma()
    480  if (!vma || vma->vm_start > addr)  in vma_to_resize()
    511  pgoff = (addr - vma->vm_start) >> PAGE_SHIFT;  in vma_to_resize()
    614  ((addr - vma->vm_start) >> PAGE_SHIFT),  in mremap_to()
    641  if (vma->vm_next && vma->vm_next->vm_start < end) /* intersection */  in vma_expandable()
    643  if (get_unmapped_area(NULL, vma->vm_start, end - vma->vm_start,  in vma_expandable()
    758  if (vma_adjust(vma, vma->vm_start, addr + new_len,  in SYSCALL_DEFINE5()
    787  ((addr - vma->vm_start) >> PAGE_SHIFT),  in SYSCALL_DEFINE5()
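
vma_expandable() decides whether mremap() can grow a mapping in place: the next VMA's vm_start must not fall inside the grown range, and get_unmapped_area() must still accept the larger span. A simplified sketch of just the neighbour check; the helper name is invented and the second half of the real test is omitted:

#include <linux/mm.h>

/*
 * Hypothetical helper mirroring the first half of vma_expandable(): true when
 * growing @vma by @delta bytes would not run into the following mapping.
 * (The real code additionally re-validates the enlarged range with
 * get_unmapped_area().)  On 5.10 the VMAs are still chained via vm_next.
 */
static bool can_grow_in_place(struct vm_area_struct *vma, unsigned long delta)
{
        unsigned long end = vma->vm_end + delta;

        if (end < vma->vm_end)                          /* address overflow */
                return false;
        if (vma->vm_next && vma->vm_next->vm_start < end)
                return false;                           /* would hit next VMA */
        return true;
}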
|
D | msync.c |
    71  if (start < vma->vm_start) {  in SYSCALL_DEFINE3()
    72  start = vma->vm_start;  in SYSCALL_DEFINE3()
    84  fstart = (start - vma->vm_start) +  in SYSCALL_DEFINE3()
|
D | pagewalk.c |
    405  } else if (start < vma->vm_start) { /* outside vma */  in walk_page_range()
    407  next = min(end, vma->vm_start);  in walk_page_range()
    477  err = walk_page_test(vma->vm_start, vma->vm_end, &walk);  in walk_page_vma()
    482  return __walk_page_range(vma->vm_start, vma->vm_end, &walk);  in walk_page_vma()
    539  start_addr = ((cba - vba) << PAGE_SHIFT) + vma->vm_start;  in walk_page_mapping()
    540  end_addr = ((cea - vba) << PAGE_SHIFT) + vma->vm_start;  in walk_page_mapping()
    547  err = walk_page_test(vma->vm_start, vma->vm_end, &walk);  in walk_page_mapping()
|
/kernel/linux/linux-5.10/drivers/gpu/drm/ |
D | drm_vm.c |
    83   if (efi_range_is_wc(vma->vm_start, vma->vm_end -  in drm_io_prot()
    84   vma->vm_start))  in drm_io_prot()
    144  resource_size_t offset = vmf->address - vma->vm_start;  in drm_vm_fault()
    215  offset = vmf->address - vma->vm_start;  in drm_vm_shm_fault()
    245  vma->vm_start, vma->vm_end - vma->vm_start);  in drm_vm_shm_close()
    319  offset = vmf->address - vma->vm_start;  in drm_vm_dma_fault()
    356  offset = vmf->address - vma->vm_start;  in drm_vm_sg_fault()
    400  vma->vm_start, vma->vm_end - vma->vm_start);  in drm_vm_open_locked()
    426  vma->vm_start, vma->vm_end - vma->vm_start);  in drm_vm_close_locked()
    470  unsigned long length = vma->vm_end - vma->vm_start;  in drm_mmap_dma()
    [all …]
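
Every drm_vm fault handler above starts the same way: turn the faulting user address back into an offset within the mapping with vmf->address - vma->vm_start, then use that offset (plus vm_pgoff, where the object can be mapped at an offset) to find the backing page. A generic, hedged sketch of that conversion in a vm_operations_struct fault handler; my_object and its fields are assumptions for the sketch, not DRM structures:

#include <linux/mm.h>

/* Illustrative driver-private object backing the mapping. */
struct my_object {
        struct page **pages;            /* backing pages, already allocated */
        unsigned long npages;
};

/*
 * Illustrative fault handler: compute the page index of the faulting address
 * inside the VMA, as drm_vm_fault() and friends do, then hand the backing
 * page to the core fault code via vmf->page.
 */
static vm_fault_t my_fault(struct vm_fault *vmf)
{
        struct vm_area_struct *vma = vmf->vma;
        struct my_object *obj = vma->vm_private_data;
        unsigned long offset = vmf->address - vma->vm_start;
        unsigned long pgoff = (offset >> PAGE_SHIFT) + vma->vm_pgoff;

        if (pgoff >= obj->npages)
                return VM_FAULT_SIGBUS;

        get_page(obj->pages[pgoff]);    /* reference handed to the core */
        vmf->page = obj->pages[pgoff];
        return 0;
}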
|
/kernel/linux/linux-5.10/arch/parisc/mm/ |
D | fault.c |
    128  if (tree->vm_start > addr) {
    134  if (prev->vm_next->vm_start > addr)
    255  vma->vm_start, vma->vm_end);  in show_signal_msg()
    289  if (!vma || address < vma->vm_start)  in do_page_fault()
    359  address < vma->vm_start || address >= vma->vm_end) {  in do_page_fault()
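
The "if (!vma || address < vma->vm_start)" test in do_page_fault() exists because find_vma() returns the first VMA whose vm_end lies above the address, which may still start above it; a NULL result or an address below vm_start both mean the fault landed in an unmapped hole. A hedged sketch of that check with an invented helper name:

#include <linux/mm.h>

/*
 * Hypothetical helper: true only when @addr is actually covered by a mapping.
 * find_vma() returns the first VMA with vm_end > addr, so the extra vm_start
 * comparison rejects addresses that fall in the gap below that VMA.
 * Caller is expected to hold mmap_lock for reading.
 */
static bool addr_is_mapped(struct mm_struct *mm, unsigned long addr)
{
        struct vm_area_struct *vma = find_vma(mm, addr);

        return vma && addr >= vma->vm_start;
}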
|
/kernel/linux/linux-5.10/drivers/vfio/pci/ |
D | vfio_pci_nvlink2.c |
    122  unsigned long vmf_off = (vmf->address - vma->vm_start) >> PAGE_SHIFT;  in vfio_pci_nvgpu_mmap_fault()
    148  if (vma->vm_end - vma->vm_start > data->size)  in vfio_pci_nvgpu_mmap()
    161  data->useraddr = vma->vm_start;  in vfio_pci_nvgpu_mmap()
    169  vma->vm_end - vma->vm_start, ret);  in vfio_pci_nvgpu_mmap()
    336  unsigned long req_len = vma->vm_end - vma->vm_start;  in vfio_pci_npu2_mmap()
    344  ret = remap_pfn_range(vma, vma->vm_start, data->mmio_atsd >> PAGE_SHIFT,  in vfio_pci_npu2_mmap()
    346  trace_vfio_pci_npu2_mmap(vdev->pdev, data->mmio_atsd, vma->vm_start,  in vfio_pci_npu2_mmap()
    347  vma->vm_end - vma->vm_start, ret);  in vfio_pci_npu2_mmap()
|
/kernel/linux/linux-5.10/arch/powerpc/platforms/powernv/ |
D | vas-api.c |
    159  if ((vma->vm_end - vma->vm_start) > PAGE_SIZE) {  in coproc_mmap()
    161  (vma->vm_end - vma->vm_start), PAGE_SIZE);  in coproc_mmap()
    180  rc = remap_pfn_range(vma, vma->vm_start, pfn + vma->vm_pgoff,  in coproc_mmap()
    181  vma->vm_end - vma->vm_start, prot);  in coproc_mmap()
    184  paste_addr, vma->vm_start, rc);  in coproc_mmap()
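
coproc_mmap() shows the usual shape of a device mmap handler: reject requests larger than the backing window, then hand the whole [vm_start, vm_end) range to remap_pfn_range() with the caller's vm_pgoff folded into the pfn. A generic, hedged sketch of the same pattern; my_dev and its fields are assumptions, not the vas-api structures:

#include <linux/errno.h>
#include <linux/mm.h>

/* Illustrative device state; phys_base/size stand in for the real window. */
struct my_dev {
        phys_addr_t phys_base;          /* start of the MMIO window */
        size_t size;                    /* length of the window in bytes */
};

static int my_dev_mmap(struct my_dev *dev, struct vm_area_struct *vma)
{
        unsigned long len = vma->vm_end - vma->vm_start;
        unsigned long off = vma->vm_pgoff << PAGE_SHIFT;
        unsigned long pfn = dev->phys_base >> PAGE_SHIFT;

        /* Refuse mappings that would run past the end of the window. */
        if (off > dev->size || len > dev->size - off)
                return -EINVAL;

        vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot);

        /* vm_pgoff lets userspace map at an offset within the window. */
        return remap_pfn_range(vma, vma->vm_start, pfn + vma->vm_pgoff,
                               len, vma->vm_page_prot);
}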
|
/kernel/linux/linux-5.10/scripts/coccinelle/api/ |
D | vma_pages.cocci |
    22  * (vma->vm_end - vma->vm_start) >> PAGE_SHIFT
    32  - ((vma->vm_end - vma->vm_start) >> PAGE_SHIFT)
    44  (vma->vm_end@p - vma->vm_start) >> PAGE_SHIFT
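
This Coccinelle rule exists because the open-coded shift already has a helper: vma_pages() in <linux/mm.h> is defined as exactly (vma->vm_end - vma->vm_start) >> PAGE_SHIFT. A small before/after illustration (both wrappers are invented names):

#include <linux/mm.h>

/* Open-coded form the semantic patch flags ... */
static unsigned long mapping_pages_open_coded(struct vm_area_struct *vma)
{
        return (vma->vm_end - vma->vm_start) >> PAGE_SHIFT;
}

/* ... and the equivalent spelling using the existing helper. */
static unsigned long mapping_pages(struct vm_area_struct *vma)
{
        return vma_pages(vma);
}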
|
/kernel/linux/linux-5.10/fs/proc/ |
D | task_nommu.c |
    37   size += region->vm_end - region->vm_start;  in task_mem()
    39   size = vma->vm_end - vma->vm_start;  in task_mem()
    92   vsize += vma->vm_end - vma->vm_start;  in task_vsize()
    114  size += region->vm_end - region->vm_start;  in task_statm()
    138  return vma->vm_start <= mm->start_stack &&  in is_stack()
    167  vma->vm_start,  in nommu_vma_show()
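
task_vsize() and the statm code around it simply walk the task's VMAs and accumulate vm_end - vm_start. On a 5.10 kernel, where mm->mmap is still a singly linked list ordered by vm_start, the loop looks roughly like this (hedged sketch, helper name invented):

#include <linux/mm.h>
#include <linux/mm_types.h>

/*
 * Hypothetical helper: total mapped bytes in @mm, in the style of
 * task_vsize(). On 5.10 the VMAs hang off mm->mmap via vm_next; the caller
 * is expected to hold mmap_lock for reading.
 */
static unsigned long total_mapped_bytes(struct mm_struct *mm)
{
        struct vm_area_struct *vma;
        unsigned long bytes = 0;

        for (vma = mm->mmap; vma; vma = vma->vm_next)
                bytes += vma->vm_end - vma->vm_start;

        return bytes;
}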
|
/kernel/linux/linux-5.10/arch/ia64/include/asm/ |
D | fb.h | 13 if (efi_range_is_wc(vma->vm_start, vma->vm_end - vma->vm_start)) in fb_pgprotect()
|
/kernel/linux/linux-5.10/arch/csky/include/asm/ |
D | tlb.h |
    12  flush_cache_range(vma, (vma)->vm_start, (vma)->vm_end); \
    18  flush_tlb_range(vma, (vma)->vm_start, (vma)->vm_end); \
|
/kernel/linux/linux-5.10/drivers/char/ |
D | mspec.c |
    75   unsigned long vm_start; /* Original (unsplit) base. */  member
    114  last_index = (vdata->vm_end - vdata->vm_start) >> PAGE_SHIFT;  in mspec_close()
    202  vdata->vm_start = vma->vm_start;  in mspec_mmap()
|
/kernel/linux/linux-5.10/drivers/media/v4l2-core/ |
D | videobuf-vmalloc.c |
    56   map->count, vma->vm_start, vma->vm_end);  in videobuf_vm_open()
    68   map->count, vma->vm_start, vma->vm_end);  in videobuf_vm_close()
    227  buf->baddr = vma->vm_start;  in __videobuf_mmap_mapper()
    233  pages = PAGE_ALIGN(vma->vm_end - vma->vm_start);  in __videobuf_mmap_mapper()
    254  map, q, vma->vm_start, vma->vm_end,  in __videobuf_mmap_mapper()
|
/kernel/linux/linux-5.10/arch/x86/entry/vdso/ |
D | vma.c |
    84   regs->ip = new_vma->vm_start + vdso_land;  in vdso_fix_landing()
    92   unsigned long new_size = new_vma->vm_end - new_vma->vm_start;  in vdso_mremap()
    99   current->mm->context.vdso = (void __user *)new_vma->vm_start;  in vdso_mremap()
    108  unsigned long new_size = new_vma->vm_end - new_vma->vm_start;  in vvar_mremap()
    150  unsigned long size = vma->vm_end - vma->vm_start;  in vdso_join_timens()
    153  zap_page_range(vma, vma->vm_start, size);  in vdso_join_timens()
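
vdso_mremap() and vvar_mremap() react to mremap() moving the special mapping: they recompute the size from the new VMA and, for the vDSO proper, repoint mm->context.vdso at new_vma->vm_start. A hedged sketch of such a .mremap callback for a vm_special_mapping, following the x86 code quoted above; the size constant and function name are assumptions:

#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/mm_types.h>
#include <linux/sched.h>

/* Assumed size of the mapped image; stands in for the real image->size. */
#define MY_VDSO_SIZE    PAGE_SIZE

/*
 * Illustrative .mremap callback, in the style of vdso_mremap(): refuse a
 * resize that no longer matches the image, then record where the mapping
 * now starts so userspace can still find it after the move.
 */
static int my_vdso_mremap(const struct vm_special_mapping *sm,
                          struct vm_area_struct *new_vma)
{
        unsigned long new_size = new_vma->vm_end - new_vma->vm_start;

        if (new_size != MY_VDSO_SIZE)
                return -EINVAL;

        current->mm->context.vdso = (void __user *)new_vma->vm_start;
        return 0;
}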
|
/kernel/linux/linux-5.10/arch/arc/kernel/ |
D | troubleshoot.c |
    91   if (vma && (vma->vm_start <= address)) {  in show_faulting_vma()
    101  vma->vm_start < TASK_UNMAPPED_BASE ?  in show_faulting_vma()
    102  address : address - vma->vm_start,  in show_faulting_vma()
    103  nm, vma->vm_start, vma->vm_end);  in show_faulting_vma()
|
D | arc_hostlink.c |
    22  if (io_remap_pfn_range(vma, vma->vm_start, vma->vm_pgoff,  in arc_hl_mmap()
    23  vma->vm_end - vma->vm_start,  in arc_hl_mmap()
|
/kernel/linux/linux-5.10/arch/x86/um/ |
D | mem_32.c |
    17  gate_vma.vm_start = FIXADDR_USER_START;  in gate_vma_init()
    49  return (addr >= vma->vm_start) && (addr < vma->vm_end);  in in_gate_area()
|
/kernel/linux/linux-5.10/arch/arm64/kernel/ |
D | vdso.c |
    85   unsigned long new_size = new_vma->vm_end - new_vma->vm_start;  in __vdso_remap()
    92   current->mm->context.vdso = (void *)new_vma->vm_start;  in __vdso_remap()
    151  unsigned long size = vma->vm_end - vma->vm_start;  in vdso_join_timens()
    154  zap_page_range(vma, vma->vm_start, size);  in vdso_join_timens()
    157  zap_page_range(vma, vma->vm_start, size);  in vdso_join_timens()
    225  unsigned long new_size = new_vma->vm_end - new_vma->vm_start;  in vvar_mremap()
|
/kernel/linux/linux-5.10/arch/arm64/kvm/ |
D | mmu.c |
    425   hva_t vm_start, vm_end;  in stage2_unmap_memslot() local
    427   if (!vma || vma->vm_start >= reg_end)  in stage2_unmap_memslot()
    433   vm_start = max(hva, vma->vm_start);  in stage2_unmap_memslot()
    437   gpa_t gpa = addr + (vm_start - memslot->userspace_addr);  in stage2_unmap_memslot()
    438   unmap_stage2_range(&kvm->arch.mmu, gpa, vm_end - vm_start);  in stage2_unmap_memslot()
    1330  hva_t vm_start, vm_end;  in kvm_arch_prepare_memory_region() local
    1332  if (!vma || vma->vm_start >= reg_end)  in kvm_arch_prepare_memory_region()
    1338  vm_start = max(hva, vma->vm_start);  in kvm_arch_prepare_memory_region()
    1343  (vm_start - mem->userspace_addr);  in kvm_arch_prepare_memory_region()
    1347  pa += vm_start - vma->vm_start;  in kvm_arch_prepare_memory_region()
    [all …]
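
stage2_unmap_memslot() and kvm_arch_prepare_memory_region() walk the VMAs overlapping a memslot and clamp the slot's host address range to each VMA with max()/min() before acting on the intersection. The clamping step alone looks like this (hedged sketch, helper name invented):

#include <linux/kernel.h>
#include <linux/mm.h>

/*
 * Hypothetical helper: intersect [start, end) with a VMA, as the arm64 KVM
 * memslot code does before unmapping or translating the overlap.
 * Returns false when the two ranges do not overlap at all.
 */
static bool clamp_range_to_vma(struct vm_area_struct *vma,
                               unsigned long start, unsigned long end,
                               unsigned long *o_start, unsigned long *o_end)
{
        if (end <= vma->vm_start || start >= vma->vm_end)
                return false;

        *o_start = max(start, vma->vm_start);
        *o_end   = min(end, vma->vm_end);
        return true;
}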
|
/kernel/linux/linux-5.10/drivers/soc/qcom/ |
D | rmtfs_mem.c |
    136  if (vma->vm_end - vma->vm_start > rmtfs_mem->size) {  in qcom_rmtfs_mem_mmap()
    139  vma->vm_end, vma->vm_start,  in qcom_rmtfs_mem_mmap()
    140  (vma->vm_end - vma->vm_start), &rmtfs_mem->size);  in qcom_rmtfs_mem_mmap()
    146  vma->vm_start,  in qcom_rmtfs_mem_mmap()
    148  vma->vm_end - vma->vm_start,  in qcom_rmtfs_mem_mmap()
|
/kernel/linux/linux-5.10/arch/powerpc/kernel/ |
D | proc_powerpc.c |
    33  if ((vma->vm_end - vma->vm_start) > PAGE_SIZE)  in page_map_mmap()
    36  remap_pfn_range(vma, vma->vm_start,  in page_map_mmap()
|
/kernel/linux/linux-5.10/drivers/xen/xenfs/ |
D | xenstored.c |
    36  size_t size = vma->vm_end - vma->vm_start;  in xsd_kva_mmap()
    41  if (remap_pfn_range(vma, vma->vm_start,  in xsd_kva_mmap()
|