/arch/x86/include/uapi/asm/
  mman.h
     16  #define arch_vm_get_page_prot(vm_flags) __pgprot( \          argument
     17      ((vm_flags) & VM_PKEY_BIT0 ? _PAGE_PKEY_BIT0 : 0) | \
     18      ((vm_flags) & VM_PKEY_BIT1 ? _PAGE_PKEY_BIT1 : 0) | \
     19      ((vm_flags) & VM_PKEY_BIT2 ? _PAGE_PKEY_BIT2 : 0) | \
     20      ((vm_flags) & VM_PKEY_BIT3 ? _PAGE_PKEY_BIT3 : 0))
/arch/powerpc/include/asm/
  mman.h
     29  static inline pgprot_t arch_vm_get_page_prot(unsigned long vm_flags)     in arch_vm_get_page_prot()  argument
     31  return (vm_flags & VM_SAO) ? __pgprot(_PAGE_SAO) : __pgprot(0);          in arch_vm_get_page_prot()
     33  #define arch_vm_get_page_prot(vm_flags) arch_vm_get_page_prot(vm_flags)  argument
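The two mman.h hits above show the per-arch hook that folds extra vm_flags bits into the page protection: x86 turns the four VM_PKEY_BIT* flags into _PAGE_PKEY_BIT* PTE bits, while powerpc maps VM_SAO to _PAGE_SAO. Below is a minimal userspace sketch of that translation pattern; the flag values and bit positions are placeholders for illustration, not the kernel's real constants.

    #include <stdio.h>

    /* Placeholder flag and PTE-bit values, chosen only for illustration. */
    #define VM_PKEY_BIT0    (1ULL << 0)
    #define VM_PKEY_BIT1    (1ULL << 1)
    #define VM_PKEY_BIT2    (1ULL << 2)
    #define VM_PKEY_BIT3    (1ULL << 3)

    #define _PAGE_PKEY_BIT0 (1ULL << 59)
    #define _PAGE_PKEY_BIT1 (1ULL << 60)
    #define _PAGE_PKEY_BIT2 (1ULL << 61)
    #define _PAGE_PKEY_BIT3 (1ULL << 62)

    /* Mirror of the x86 macro shown above: test each VM_PKEY flag and set
     * the corresponding page-table bit in the returned protection value. */
    static unsigned long long arch_vm_get_page_prot(unsigned long long vm_flags)
    {
        return ((vm_flags & VM_PKEY_BIT0) ? _PAGE_PKEY_BIT0 : 0) |
               ((vm_flags & VM_PKEY_BIT1) ? _PAGE_PKEY_BIT1 : 0) |
               ((vm_flags & VM_PKEY_BIT2) ? _PAGE_PKEY_BIT2 : 0) |
               ((vm_flags & VM_PKEY_BIT3) ? _PAGE_PKEY_BIT3 : 0);
    }

    int main(void)
    {
        /* Protection key 5 (binary 0101) sets pkey bits 0 and 2 in the "PTE". */
        unsigned long long prot = arch_vm_get_page_prot(VM_PKEY_BIT0 | VM_PKEY_BIT2);
        printf("prot bits: 0x%llx\n", prot);
        return 0;
    }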
/arch/hexagon/mm/
  vm_fault.c
     79  if (!(vma->vm_flags & VM_GROWSDOWN))           in do_page_fault()
     91  if (!(vma->vm_flags & VM_EXEC))                in do_page_fault()
     95  if (!(vma->vm_flags & VM_READ))                in do_page_fault()
     99  if (!(vma->vm_flags & VM_WRITE))               in do_page_fault()
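The hexagon handler, like most of the fault.c entries below, performs the same access check: the fault is classified as an execute, write, or plain read access and rejected if the VMA's vm_flags do not grant that right. A compilable userspace sketch of that check follows; the flag values and the helper name are illustrative rather than taken from any particular arch.

    #include <stdbool.h>
    #include <stdio.h>

    /* Illustrative flag values; the kernel defines the real ones in linux/mm.h. */
    #define VM_READ  0x1UL
    #define VM_WRITE 0x2UL
    #define VM_EXEC  0x4UL

    /* Returns true when the faulting access is NOT permitted by vm_flags,
     * mirroring the if (!(vma->vm_flags & ...)) tests in the listing. */
    static bool access_error(bool is_exec, bool is_write, unsigned long vm_flags)
    {
        if (is_exec)
            return !(vm_flags & VM_EXEC);    /* instruction fetch from a non-exec VMA */
        if (is_write)
            return !(vm_flags & VM_WRITE);   /* store to a VMA without write permission */
        /* Plain load: any of the read/write/exec rights suffices on many arches. */
        return !(vm_flags & (VM_READ | VM_WRITE | VM_EXEC));
    }

    int main(void)
    {
        printf("%d\n", access_error(false, true, VM_READ));            /* 1: write denied */
        printf("%d\n", access_error(false, false, VM_READ | VM_EXEC)); /* 0: read allowed */
        return 0;
    }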
/arch/m68k/mm/
  fault.c
     96  if (vma->vm_flags & VM_IO)                               in do_page_fault()
    100  if (!(vma->vm_flags & VM_GROWSDOWN))                     in do_page_fault()
    123  if (!(vma->vm_flags & VM_WRITE))                         in do_page_fault()
    130  if (!(vma->vm_flags & (VM_READ | VM_EXEC | VM_WRITE)))   in do_page_fault()
/arch/nios2/mm/
  cacheflush.c
     87  if (!(mpnt->vm_flags & VM_MAYSHARE))             in flush_aliases()
    136  if (vma == NULL || (vma->vm_flags & VM_EXEC))    in flush_cache_range()
    156  if (vma->vm_flags & VM_EXEC)                     in flush_cache_page()
    225  if (vma->vm_flags & VM_EXEC)                     in update_mmu_cache()
    256  if (vma->vm_flags & VM_EXEC)                     in copy_from_user_page()
    267  if (vma->vm_flags & VM_EXEC)                     in copy_to_user_page()
  fault.c
     99  if (!(vma->vm_flags & VM_GROWSDOWN))             in do_page_fault()
    116  if (!(vma->vm_flags & VM_EXEC))                  in do_page_fault()
    120  if (!(vma->vm_flags & VM_READ))                  in do_page_fault()
    124  if (!(vma->vm_flags & VM_WRITE))                 in do_page_fault()
/arch/alpha/mm/
  fault.c
    126  if (!(vma->vm_flags & VM_GROWSDOWN))             in do_page_fault()
    136  if (!(vma->vm_flags & VM_EXEC))                  in do_page_fault()
    140  if (!(vma->vm_flags & (VM_READ | VM_WRITE)))     in do_page_fault()
    143  if (!(vma->vm_flags & VM_WRITE))                 in do_page_fault()
/arch/powerpc/mm/
  copro_fault.c
     56  if (!(vma->vm_flags & VM_GROWSDOWN))                              in copro_handle_mm_fault()
     64  if (!(vma->vm_flags & VM_WRITE))                                  in copro_handle_mm_fault()
     67  if (!(vma->vm_flags & (VM_READ | VM_EXEC)))                       in copro_handle_mm_fault()
  fault.c
    281  return !(vma->vm_flags & VM_EXEC) &&                              in access_error()
    283  !(vma->vm_flags & (VM_READ | VM_WRITE)));                         in access_error()
    287  if (unlikely(!(vma->vm_flags & VM_WRITE)))                        in access_error()
    292  if (unlikely(!(vma->vm_flags & (VM_READ | VM_EXEC | VM_WRITE))))  in access_error()
    487  if (unlikely(!(vma->vm_flags & VM_GROWSDOWN)))                    in __do_page_fault()
/arch/unicore32/mm/
  flush.c
     26  if (vma->vm_flags & VM_EXEC)      in flush_cache_range()
     39  if (vma->vm_flags & VM_EXEC) {    in flush_ptrace_access()
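The cache and TLB maintenance hits (nios2 cacheflush.c above, unicore32 flush.c here, and the score, alpha, and tile entries below) all use VM_EXEC the same way: instruction-cache work is only done for mappings that can actually be executed from. A hedged sketch of that gate follows, with made-up flush helpers standing in for the per-arch primitives.

    #include <stdio.h>

    #define VM_EXEC 0x4UL   /* illustrative value */

    /* Stand-ins for the per-arch D-cache and I-cache flush primitives. */
    static void flush_dcache_range(unsigned long start, unsigned long end)
    {
        printf("dcache flush %#lx-%#lx\n", start, end);
    }

    static void flush_icache_range(unsigned long start, unsigned long end)
    {
        printf("icache flush %#lx-%#lx\n", start, end);
    }

    /* Data is always written back; the instruction cache only needs attention
     * when the mapping is executable, which is what the VM_EXEC test gates. */
    static void flush_user_range(unsigned long vm_flags,
                                 unsigned long start, unsigned long end)
    {
        flush_dcache_range(start, end);
        if (vm_flags & VM_EXEC)
            flush_icache_range(start, end);
    }

    int main(void)
    {
        flush_user_range(VM_EXEC, 0x1000, 0x2000);  /* executable: both caches */
        flush_user_range(0,       0x3000, 0x4000);  /* data-only: D-cache only  */
        return 0;
    }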
/arch/score/include/asm/
  cacheflush.h
     32  if (vma->vm_flags & VM_EXEC) {    in flush_icache_page()
     45  if ((vma->vm_flags & VM_EXEC))    \
/arch/mips/mm/
  fault.c
    106  if (!(vma->vm_flags & VM_GROWSDOWN))                          in __do_page_fault()
    118  if (!(vma->vm_flags & VM_WRITE))                              in __do_page_fault()
    123  if (address == regs->cp0_epc && !(vma->vm_flags & VM_EXEC)) { in __do_page_fault()
    133  if (!(vma->vm_flags & VM_READ) &&                             in __do_page_fault()
    145  if (!(vma->vm_flags & (VM_READ | VM_WRITE | VM_EXEC)))        in __do_page_fault()
/arch/tile/include/asm/
  tlbflush.h
     53  if (!vma || (vma != FLUSH_NONEXEC && (vma->vm_flags & VM_EXEC)))   in local_flush_tlb_page()
     67  if (!vma || (vma != FLUSH_NONEXEC && (vma->vm_flags & VM_EXEC)))   in local_flush_tlb_pages()
/arch/cris/mm/
  fault.c
    130  if (!(vma->vm_flags & VM_GROWSDOWN))             in do_page_fault()
    156  if (!(vma->vm_flags & VM_EXEC))                  in do_page_fault()
    159  if (!(vma->vm_flags & VM_WRITE))                 in do_page_fault()
    163  if (!(vma->vm_flags & (VM_READ | VM_EXEC)))      in do_page_fault()
/arch/xtensa/mm/
  fault.c
     86  if (!(vma->vm_flags & VM_GROWSDOWN))             in do_page_fault()
     99  if (!(vma->vm_flags & VM_WRITE))                 in do_page_fault()
    103  if (!(vma->vm_flags & VM_EXEC))                  in do_page_fault()
    106  if (!(vma->vm_flags & (VM_READ | VM_WRITE)))     in do_page_fault()
/arch/sparc/mm/
  fault_32.c
    216  if (!(vma->vm_flags & VM_GROWSDOWN))             in do_sparc_fault()
    227  if (!(vma->vm_flags & VM_WRITE))                 in do_sparc_fault()
    231  if (!(vma->vm_flags & (VM_READ | VM_EXEC)))      in do_sparc_fault()
    401  if (!(vma->vm_flags & VM_GROWSDOWN))             in force_user_fault()
    408  if (!(vma->vm_flags & VM_WRITE))                 in force_user_fault()
    412  if (!(vma->vm_flags & (VM_READ | VM_EXEC)))      in force_user_fault()
  fault_64.c
    365  (vma->vm_flags & VM_WRITE) != 0) {               in do_sparc64_fault()
    386  if (!(vma->vm_flags & VM_GROWSDOWN))             in do_sparc64_fault()
    414  if ((fault_code & FAULT_CODE_ITLB) && !(vma->vm_flags & VM_EXEC)) {   in do_sparc64_fault()
    422  if (!(vma->vm_flags & VM_WRITE))                 in do_sparc64_fault()
    429  (vma->vm_flags & VM_EXEC) != 0 &&                in do_sparc64_fault()
    437  if (!(vma->vm_flags & (VM_READ | VM_EXEC)))      in do_sparc64_fault()
/arch/ia64/mm/
  fault.c
    154  if (((isr >> IA64_ISR_R_BIT) & 1UL) && (!(vma->vm_flags & (VM_READ | VM_WRITE))))          in ia64_do_page_fault()
    157  if ((vma->vm_flags & mask) != mask)                                                        in ia64_do_page_fault()
    209  if (!(prev_vma && (prev_vma->vm_flags & VM_GROWSUP) && (address == prev_vma->vm_end))) {   in ia64_do_page_fault()
    212  if (!(vma->vm_flags & VM_GROWSDOWN))                                                       in ia64_do_page_fault()
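The ia64 entry is the one place in this listing that tests VM_GROWSUP as well as VM_GROWSDOWN: when the faulting address falls outside every VMA, the handler only expands an adjacent mapping if that mapping is marked as a growable stack. The following is a simplified sketch of that decision; the structure, flag values, and helper name are illustrative, not the kernel's expand_stack() path.

    #include <stdbool.h>
    #include <stdio.h>

    #define VM_GROWSDOWN 0x0100UL   /* illustrative values */
    #define VM_GROWSUP   0x0200UL

    struct vma {
        unsigned long start, end, flags;
    };

    /* Decide whether a fault at addr, which lies in the gap between prev and
     * next, may be satisfied by growing one of the neighbouring mappings. */
    static bool may_expand(const struct vma *prev, const struct vma *next,
                           unsigned long addr)
    {
        /* Grow the previous VMA upward only if it ends exactly at addr. */
        if (prev && (prev->flags & VM_GROWSUP) && addr == prev->end)
            return true;
        /* Otherwise the next VMA must be a downward-growing stack above addr. */
        if (next && (next->flags & VM_GROWSDOWN) && addr < next->end)
            return true;
        return false;
    }

    int main(void)
    {
        struct vma stack = { 0x7000, 0x8000, VM_GROWSDOWN };
        printf("%d\n", may_expand(NULL, &stack, 0x6ff0));  /* 1: stack may grow down */
        printf("%d\n", may_expand(NULL, &stack, 0x9000));  /* 0: address above the stack */
        return 0;
    }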
/arch/arc/mm/
  fault.c
    108  if (!(vma->vm_flags & VM_GROWSDOWN))             in do_page_fault()
    127  if (!(vma->vm_flags & VM_WRITE))                 in do_page_fault()
    131  if (!(vma->vm_flags & (VM_READ | VM_EXEC)))      in do_page_fault()
/arch/score/mm/
  fault.c
     89  if (!(vma->vm_flags & VM_GROWSDOWN))                     in do_page_fault()
    101  if (!(vma->vm_flags & VM_WRITE))                         in do_page_fault()
    105  if (!(vma->vm_flags & (VM_READ | VM_WRITE | VM_EXEC)))   in do_page_fault()
  cache.c
     83  int exec = (vma->vm_flags & VM_EXEC);                    in __update_cache()
    176  int exec = vma->vm_flags & VM_EXEC;                      in flush_cache_range()
    214  int exec = vma->vm_flags & VM_EXEC;                      in flush_cache_page()
/arch/alpha/include/asm/
  tlbflush.h
     44  if (vma->vm_flags & VM_EXEC) {    in ev4_flush_tlb_current_page()
     56  if (vma->vm_flags & VM_EXEC)      in ev5_flush_tlb_current_page()
/arch/microblaze/mm/
  fault.c
    156  if (unlikely(!(vma->vm_flags & VM_GROWSDOWN)))           in do_page_fault()
    202  if (unlikely(!(vma->vm_flags & VM_WRITE)))               in do_page_fault()
    210  if (unlikely(!(vma->vm_flags & (VM_READ | VM_EXEC))))    in do_page_fault()
/arch/tile/kernel/
  tlb.c
     57  int cache = (vma->vm_flags & VM_EXEC) ? HV_FLUSH_EVICT_L1I : 0;   in flush_tlb_page_mm()
     73  int cache = (vma->vm_flags & VM_EXEC) ? HV_FLUSH_EVICT_L1I : 0;   in flush_tlb_range()
/arch/openrisc/mm/
  fault.c
    120  if (!(vma->vm_flags & VM_GROWSDOWN))             in do_page_fault()
    147  if (!(vma->vm_flags & VM_WRITE))                 in do_page_fault()
    152  if (!(vma->vm_flags & (VM_READ | VM_EXEC)))      in do_page_fault()