/arch/m32r/include/asm/

cacheflush.h
   13  #define flush_cache_range(vma, start, end) do { } while (0)  [argument]
   14  #define flush_cache_page(vma, vmaddr, pfn) do { } while (0)  [argument]
   21  #define flush_icache_page(vma,pg) _flush_cache_copyback_all()  [argument]
   22  #define flush_icache_user_range(vma,pg,adr,len) _flush_cache_copyback_all()  [argument]
   27  #define flush_icache_page(vma,pg) smp_flush_cache_all()  [argument]
   28  #define flush_icache_user_range(vma,pg,adr,len) smp_flush_cache_all()  [argument]
   35  #define flush_cache_range(vma, start, end) do { } while (0)  [argument]
   36  #define flush_cache_page(vma, vmaddr, pfn) do { } while (0)  [argument]
   42  #define flush_icache_page(vma,pg) _flush_cache_all()  [argument]
   43  #define flush_icache_user_range(vma,pg,adr,len) _flush_cache_all()  [argument]
   [all …]

tlbflush.h
   27  #define flush_tlb_page(vma, page) local_flush_tlb_page(vma, page)  [argument]
   28  #define flush_tlb_range(vma, start, end) \  [argument]
   34  #define flush_tlb_page(vma, vmaddr) do { } while (0)  [argument]
   35  #define flush_tlb_range(vma, start, end) do { } while (0)  [argument]
   46  #define flush_tlb_page(vma, page) smp_flush_tlb_page(vma, page)  [argument]
   47  #define flush_tlb_range(vma, start, end) \  [argument]
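The duplicate definitions at different line numbers above are the usual configuration split: the uniprocessor/write-back variants route the I-cache flushes through _flush_cache_copyback_all() and map flush_tlb_page() to the local helper, while the SMP variants go through the smp_* cross-CPU helpers. A minimal sketch of that shape, assuming the conventional CONFIG_SMP conditional rather than the verbatim m32r headers:

    /* Sketch of the config split implied by the duplicate definitions
     * above; not the verbatim m32r header. */
    #ifdef CONFIG_SMP
    #define flush_icache_page(vma, pg)      smp_flush_cache_all()
    #define flush_tlb_page(vma, page)       smp_flush_tlb_page(vma, page)
    #else
    #define flush_icache_page(vma, pg)      _flush_cache_copyback_all()
    #define flush_tlb_page(vma, page)       local_flush_tlb_page(vma, page)
    #endif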
/arch/metag/include/asm/

tlb.h
   11  #define tlb_start_vma(tlb, vma) \  [argument]
   17  #define tlb_end_vma(tlb, vma) \  [argument]
   26  #define tlb_start_vma(tlb, vma) do { } while (0)  [argument]
   27  #define tlb_end_vma(tlb, vma) do { } while (0)  [argument]
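tlb_start_vma() and tlb_end_vma() are the hooks the generic mmu_gather code runs before and after tearing down the mappings of a single VMA; they are stubbed out when no per-VMA maintenance is needed. When they are not no-ops they commonly bracket the teardown with a cache flush and a TLB flush over the VMA, roughly as below (a sketch of the common pattern, assuming the usual mmu_gather "fullmm" field, not copied from the metag header):

    /* Common non-trivial shape of these hooks (sketch, not copied from metag). */
    #define tlb_start_vma(tlb, vma)                                         \
            do {                                                            \
                    if (!(tlb)->fullmm)                                     \
                            flush_cache_range((vma), (vma)->vm_start,       \
                                              (vma)->vm_end);               \
            } while (0)

    #define tlb_end_vma(tlb, vma)                                           \
            do {                                                            \
                    if (!(tlb)->fullmm)                                     \
                            flush_tlb_range((vma), (vma)->vm_start,         \
                                            (vma)->vm_end);                 \
            } while (0)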
/arch/powerpc/include/asm/book3s/64/

tlbflush.h
   11  static inline void flush_pmd_tlb_range(struct vm_area_struct *vma,  [in flush_pmd_tlb_range()]
   20  static inline void flush_hugetlb_tlb_range(struct vm_area_struct *vma,  [in flush_hugetlb_tlb_range()]
   29  static inline void flush_tlb_range(struct vm_area_struct *vma,  [in flush_tlb_range()]
   52  static inline void local_flush_tlb_page(struct vm_area_struct *vma,  [in local_flush_tlb_page()]
   75  static inline void flush_tlb_page(struct vm_area_struct *vma,  [in flush_tlb_page()]
   84  #define flush_tlb_page(vma, addr) local_flush_tlb_page(vma, addr)  [argument]
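flush_tlb_page(vma, addr) is what generic mm code calls once a single user PTE has changed; on book3s64 the inline dispatches to the radix or hash implementation, or, in the configuration at line 84, simply falls back to local_flush_tlb_page(). A hedged caller-side sketch (the helper name is illustrative, not taken from the powerpc sources):

    /* Illustrative helper, not from the powerpc sources: after a
     * page-table update, drop the now-stale translation for addr. */
    static void example_update_pte(struct vm_area_struct *vma, unsigned long addr,
                                   pte_t *ptep, pte_t entry)
    {
            set_pte_at(vma->vm_mm, addr, ptep, entry);
            flush_tlb_page(vma, addr);
    }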
/arch/xtensa/include/asm/

tlb.h
   21  # define tlb_start_vma(tlb,vma) do { } while (0)  [argument]
   22  # define tlb_end_vma(tlb,vma) do { } while (0)  [argument]
   26  # define tlb_start_vma(tlb, vma) \  [argument]
   32  # define tlb_end_vma(tlb, vma) \  [argument]

/arch/hexagon/include/asm/

cacheflush.h
   44  #define flush_cache_range(vma, start, end) do { } while (0)  [argument]
   45  #define flush_cache_page(vma, vmaddr, pfn) do { } while (0)  [argument]
   50  #define flush_icache_page(vma, pg) do { } while (0)  [argument]
   51  #define flush_icache_user_range(vma, pg, adr, len) do { } while (0)  [argument]
   86  static inline void update_mmu_cache(struct vm_area_struct *vma,  [in update_mmu_cache()]
   95  #define copy_from_user_page(vma, page, vaddr, dst, src, len) \  [argument]
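With Hexagon's coherent caches most of these hooks compile away, and copy_from_user_page() (line 95) typically reduces to a plain memcpy(), since reading a user page through the kernel mapping needs no extra maintenance. A sketch of that common definition (assumed shape, not quoted from the hexagon header):

    /* Assumed common definition; not quoted from the hexagon header. */
    #define copy_from_user_page(vma, page, vaddr, dst, src, len) \
            memcpy(dst, src, len)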
/arch/ia64/include/asm/

cacheflush.h
   22  #define flush_cache_range(vma, start, end) do { } while (0)  [argument]
   23  #define flush_cache_page(vma, vmaddr, pfn) do { } while (0)  [argument]
   24  #define flush_icache_page(vma,page) do { } while (0)  [argument]
   41  #define flush_icache_user_range(vma, page, user_addr, len) \  [argument]
   47  #define copy_to_user_page(vma, page, vaddr, dst, src, len) \  [argument]
   51  #define copy_from_user_page(vma, page, vaddr, dst, src, len) \  [argument]
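copy_to_user_page() is the write-side counterpart used by ptrace and friends when they patch another process's text: after the bytes land in the page, the instruction cache must be brought in line for the executable mapping. The ia64 entries at lines 41-51 pair the copy with flush_icache_user_range(); the usual shape is roughly the sketch below (treat it as a sketch implied by the macros listed above, not the verbatim header):

    /* Typical shape implied by lines 41-51 above; treat as a sketch. */
    #define copy_to_user_page(vma, page, vaddr, dst, src, len)              \
    do {                                                                    \
            memcpy(dst, src, len);                                          \
            flush_icache_user_range(vma, page, vaddr, len);                 \
    } while (0)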
/arch/powerpc/mm/

hugetlbpage-radix.c
   10  void radix__flush_hugetlb_page(struct vm_area_struct *vma, unsigned long vmaddr)  [in radix__flush_hugetlb_page()]
   19  void radix__local_flush_hugetlb_page(struct vm_area_struct *vma, unsigned long vmaddr)  [in radix__local_flush_hugetlb_page()]
   28  void radix__flush_hugetlb_tlb_range(struct vm_area_struct *vma, unsigned long start,  [in radix__flush_hugetlb_tlb_range()]
   49  struct vm_area_struct *vma;  [in radix__hugetlb_get_unmapped_area(), local]

/arch/arc/include/asm/

tlb.h
   27  #define tlb_start_vma(tlb, vma)  [argument]
   29  #define tlb_start_vma(tlb, vma) \  [argument]
   36  #define tlb_end_vma(tlb, vma) \  [argument]

tlbflush.h
   26  #define flush_tlb_range(vma, s, e) local_flush_tlb_range(vma, s, e)  [argument]
   27  #define flush_tlb_page(vma, page) local_flush_tlb_page(vma, page)  [argument]
   32  #define flush_pmd_tlb_range(vma, s, e) local_flush_pmd_tlb_range(vma, s, e)  [argument]
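flush_pmd_tlb_range() (ARC line 32, and the powerpc book3s inline listed earlier at line 11) is the huge-page-granular variant used by transparent-hugepage code: after a PMD-level entry is rewritten, the whole PMD-sized range is invalidated in one call. A hedged usage sketch (the helper name is illustrative; HPAGE_PMD_MASK and HPAGE_PMD_SIZE come from the generic THP headers):

    /* Illustrative only: invalidate the translation for one huge page
     * after its PMD has been updated. */
    static void example_flush_huge_pmd(struct vm_area_struct *vma,
                                       unsigned long addr)
    {
            addr &= HPAGE_PMD_MASK;
            flush_pmd_tlb_range(vma, addr, addr + HPAGE_PMD_SIZE);
    }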
/arch/cris/include/asm/

cacheflush.h
   13  #define flush_cache_range(vma, start, end) do { } while (0)  [argument]
   14  #define flush_cache_page(vma, vmaddr, pfn) do { } while (0)  [argument]
   20  #define flush_icache_page(vma,pg) do { } while (0)  [argument]
   21  #define flush_icache_user_range(vma,pg,adr,len) do { } while (0)  [argument]
   25  #define copy_to_user_page(vma, page, vaddr, dst, src, len) \  [argument]
   27  #define copy_from_user_page(vma, page, vaddr, dst, src, len) \  [argument]

/arch/unicore32/mm/

flush.c
   23  void flush_cache_range(struct vm_area_struct *vma, unsigned long start,  [in flush_cache_range()]
   30  void flush_cache_page(struct vm_area_struct *vma, unsigned long user_addr,  [in flush_cache_page()]
   35  static void flush_ptrace_access(struct vm_area_struct *vma, struct page *page,  [in flush_ptrace_access()]
   53  void copy_to_user_page(struct vm_area_struct *vma, struct page *page,  [in copy_to_user_page()]
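flush_ptrace_access() (line 35) is the piece that makes a ptrace write visible to the traced task: the data cache lines holding the new bytes are cleaned, and the instruction cache is only touched when the mapping is executable. A sketch of that decision, assuming only the generic flush_icache_range() interface; the function below is illustrative, not the unicore32 implementation:

    /* Illustrative sketch, not the unicore32 flush_ptrace_access(). */
    static void example_flush_ptrace_access(struct vm_area_struct *vma,
                                            void *kaddr, unsigned long len)
    {
            /* Instruction fetches only need syncing for executable VMAs. */
            if (vma->vm_flags & VM_EXEC)
                    flush_icache_range((unsigned long)kaddr,
                                       (unsigned long)kaddr + len);
    }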
/arch/sparc/include/asm/

cacheflush_64.h
   23  #define flush_cache_range(vma, start, end) \  [argument]
   25  #define flush_cache_page(vma, page, pfn) \  [argument]
   50  #define flush_icache_page(vma, pg) do { } while(0)  [argument]
   51  #define flush_icache_user_range(vma,pg,adr,len) do { } while (0)  [argument]
   57  #define copy_to_user_page(vma, page, vaddr, dst, src, len) \  [argument]
   64  #define copy_from_user_page(vma, page, vaddr, dst, src, len) \  [argument]

cacheflush_32.h
   12  #define flush_cache_range(vma,start,end) \  [argument]
   14  #define flush_cache_page(vma,addr,pfn) \  [argument]
   17  #define flush_icache_page(vma, pg) do { } while (0)  [argument]
   19  #define flush_icache_user_range(vma,pg,adr,len) do { } while (0)  [argument]
   21  #define copy_to_user_page(vma, page, vaddr, dst, src, len) \  [argument]
   26  #define copy_from_user_page(vma, page, vaddr, dst, src, len) \  [argument]

/arch/alpha/include/asm/

cacheflush.h
   10  #define flush_cache_range(vma, start, end) do { } while (0)  [argument]
   11  #define flush_cache_page(vma, vmaddr, pfn) do { } while (0)  [argument]
   50  flush_icache_user_range(struct vm_area_struct *vma, struct page *page,  [in flush_icache_user_range()]
   67  #define flush_icache_page(vma, page) \  [argument]
   70  #define copy_to_user_page(vma, page, vaddr, dst, src, len) \  [argument]
   74  #define copy_from_user_page(vma, page, vaddr, dst, src, len) \  [argument]

tlbflush.h
   39  struct vm_area_struct *vma,  [in ev4_flush_tlb_current_page()]
   52  struct vm_area_struct *vma,  [in ev5_flush_tlb_current_page()]
  117  flush_tlb_page(struct vm_area_struct *vma, unsigned long addr)  [in flush_tlb_page()]
  130  flush_tlb_range(struct vm_area_struct *vma, unsigned long start,  [in flush_tlb_range()]
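Alpha's ranged flush (line 130) illustrates a common shortcut: when invalidating individual pages over a range is not worth the cost, flush_tlb_range() simply drops the whole address space's translations. A sketch of that shape (assumed; the real Alpha code also has the EV4/EV5 per-CPU variants shown at lines 39 and 52):

    /* Sketch of the whole-mm shortcut; not the literal alpha code. */
    static inline void flush_tlb_range(struct vm_area_struct *vma,
                                       unsigned long start, unsigned long end)
    {
            /* Cheaper to invalidate the whole mm than to walk the range. */
            flush_tlb_mm(vma->vm_mm);
    }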
/arch/m68k/include/asm/

cacheflush_no.h
   13  #define flush_cache_range(vma, start, end) do { } while (0)  [argument]
   14  #define flush_cache_page(vma, vmaddr) do { } while (0)  [argument]
   21  #define flush_icache_page(vma,pg) do { } while (0)  [argument]
   22  #define flush_icache_user_range(vma,pg,adr,len) do { } while (0)  [argument]
   26  #define copy_to_user_page(vma, page, vaddr, dst, src, len) \  [argument]
   28  #define copy_from_user_page(vma, page, vaddr, dst, src, len) \  [argument]

/arch/frv/include/asm/

cacheflush.h
   25  #define flush_cache_page(vma, vmaddr, pfn) do {} while(0)  [argument]
   80  static inline void flush_icache_user_range(struct vm_area_struct *vma, struct page *page,  [in flush_icache_user_range()]
   87  static inline void flush_icache_page(struct vm_area_struct *vma, struct page *page)  [in flush_icache_page()]
   96  #define copy_to_user_page(vma, page, vaddr, dst, src, len) \  [argument]
  102  #define copy_from_user_page(vma, page, vaddr, dst, src, len) \  [argument]

/arch/x86/include/asm/

mmu_context.h
  180  struct vm_area_struct *vma)  [in arch_bprm_mm_init()]
  185  static inline void arch_unmap(struct mm_struct *mm, struct vm_area_struct *vma,  [in arch_unmap()]
  210  static inline int vma_pkey(struct vm_area_struct *vma)  [in vma_pkey()]
  218  static inline int vma_pkey(struct vm_area_struct *vma)  [in vma_pkey()]
  245  static inline bool vma_is_foreign(struct vm_area_struct *vma)  [in vma_is_foreign()]
  260  static inline bool arch_vma_access_permitted(struct vm_area_struct *vma,  [in arch_vma_access_permitted()]
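The two vma_pkey() definitions (lines 210 and 218) are the protection-keys split: with pkeys compiled in, the key is read out of the high vm_flags bits; otherwise every VMA reports key 0. A sketch of that pair, assuming the VM_PKEY_BIT*/VM_PKEY_SHIFT definitions from the generic mm headers, not quoted verbatim from the x86 header:

    /* Sketch assuming the VM_PKEY_* flag bits from <linux/mm.h>; not verbatim. */
    #ifdef CONFIG_X86_INTEL_MEMORY_PROTECTION_KEYS
    static inline int vma_pkey(struct vm_area_struct *vma)
    {
            unsigned long flags = vma->vm_flags &
                    (VM_PKEY_BIT0 | VM_PKEY_BIT1 | VM_PKEY_BIT2 | VM_PKEY_BIT3);

            return flags >> VM_PKEY_SHIFT;
    }
    #else
    static inline int vma_pkey(struct vm_area_struct *vma)
    {
            return 0;
    }
    #endif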
/arch/xtensa/mm/

cache.c
  109  unsigned long vaddr, struct vm_area_struct *vma)  [in copy_user_highpage()]
  183  void local_flush_cache_range(struct vm_area_struct *vma,  [in local_flush_cache_range()]
  198  void local_flush_cache_page(struct vm_area_struct *vma, unsigned long address,  [in local_flush_cache_page()]
  214  update_mmu_cache(struct vm_area_struct * vma, unsigned long addr, pte_t *ptep)  [in update_mmu_cache()]
  261  void copy_to_user_page(struct vm_area_struct *vma, struct page *page,  [in copy_to_user_page()]
  297  extern void copy_from_user_page(struct vm_area_struct *vma, struct page *page,  [in copy_from_user_page()]

/arch/sh/mm/

cache.c
   61  void copy_to_user_page(struct vm_area_struct *vma, struct page *page,  [in copy_to_user_page()]
   80  void copy_from_user_page(struct vm_area_struct *vma, struct page *page,  [in copy_from_user_page()]
   97  unsigned long vaddr, struct vm_area_struct *vma)  [in copy_user_highpage()]
  137  void __update_cache(struct vm_area_struct *vma,  [in __update_cache()]
  194  void flush_cache_page(struct vm_area_struct *vma, unsigned long addr,  [in flush_cache_page()]
  206  void flush_cache_range(struct vm_area_struct *vma, unsigned long start,  [in flush_cache_range()]
  237  void flush_icache_page(struct vm_area_struct *vma, struct page *page)  [in flush_icache_page()]
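On SH and xtensa these are real functions because the caches can alias: copy_to_user_page() and copy_from_user_page() must pick coherent kernel addresses and flush afterwards. From the caller's point of view (ptrace, uprobes, access_process_vm()) the contract looks like this hedged sketch; the helper name and the kmap() usage are illustrative:

    /* Illustrative caller: write into a user-mapped page and let the
     * arch keep D- and I-caches coherent for the target VMA. */
    static void example_poke_user_text(struct vm_area_struct *vma,
                                       struct page *page, unsigned long uaddr,
                                       const void *buf, unsigned long len)
    {
            void *kaddr = kmap(page);

            copy_to_user_page(vma, page, uaddr,
                              (char *)kaddr + (uaddr & ~PAGE_MASK), buf, len);
            kunmap(page);
    }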
/arch/arm64/mm/

flush.c
   28  void flush_cache_range(struct vm_area_struct *vma, unsigned long start,  [in flush_cache_range()]
   47  static void flush_ptrace_access(struct vm_area_struct *vma, struct page *page,  [in flush_ptrace_access()]
   60  void copy_to_user_page(struct vm_area_struct *vma, struct page *page,  [in copy_to_user_page()]

/arch/unicore32/include/asm/

cacheflush.h
  120  #define copy_from_user_page(vma, page, vaddr, dst, src, len) \  [argument]
  152  #define flush_cache_user_range(vma, start, end) \  [argument]
  187  #define flush_icache_user_range(vma, page, addr, len) \  [argument]
  194  #define flush_icache_page(vma, page) do { } while (0)  [argument]

/arch/powerpc/include/asm/

cacheflush.h
   23  #define flush_cache_range(vma, start, end) do { } while (0)  [argument]
   24  #define flush_cache_page(vma, vmaddr, pfn) do { } while (0)  [argument]
   25  #define flush_icache_page(vma, page) do { } while (0)  [argument]
  105  #define copy_to_user_page(vma, page, vaddr, dst, src, len) \  [argument]
  110  #define copy_from_user_page(vma, page, vaddr, dst, src, len) \  [argument]

/arch/tile/include/asm/

cacheflush.h
   29  #define flush_cache_range(vma, start, end) do { } while (0)  [argument]
   30  #define flush_cache_page(vma, vmaddr, pfn) do { } while (0)  [argument]
   37  #define flush_icache_page(vma, pg) do { } while (0)  [argument]
   38  #define flush_icache_user_range(vma, pg, adr, len) do { } while (0)  [argument]
   64  static inline void copy_to_user_page(struct vm_area_struct *vma,  [in copy_to_user_page()]
   75  #define copy_from_user_page(vma, page, vaddr, dst, src, len) \  [argument]