
Searched refs:vma (Results 1 – 25 of 384) sorted by relevance


/arch/powerpc/include/asm/book3s/64/
tlbflush.h
12 static inline void flush_pmd_tlb_range(struct vm_area_struct *vma, in flush_pmd_tlb_range() argument
16 return radix__flush_pmd_tlb_range(vma, start, end); in flush_pmd_tlb_range()
17 return hash__flush_tlb_range(vma, start, end); in flush_pmd_tlb_range()
21 static inline void flush_hugetlb_tlb_range(struct vm_area_struct *vma, in flush_hugetlb_tlb_range() argument
26 return radix__flush_hugetlb_tlb_range(vma, start, end); in flush_hugetlb_tlb_range()
27 return hash__flush_tlb_range(vma, start, end); in flush_hugetlb_tlb_range()
30 static inline void flush_tlb_range(struct vm_area_struct *vma, in flush_tlb_range() argument
34 return radix__flush_tlb_range(vma, start, end); in flush_tlb_range()
35 return hash__flush_tlb_range(vma, start, end); in flush_tlb_range()
53 static inline void local_flush_tlb_page(struct vm_area_struct *vma, in local_flush_tlb_page() argument
[all …]
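
These book3s/64 helpers pick the MMU backend at runtime: the same flush entry point goes to the radix implementation when the radix MMU is active and falls back to the hash flush otherwise. A minimal sketch of that dispatch pattern, assuming the real powerpc radix_enabled() predicate; my_flush_tlb_range() is an invented name:

static inline void my_flush_tlb_range(struct vm_area_struct *vma,
				      unsigned long start, unsigned long end)
{
	if (radix_enabled())	/* radix MMU in use */
		return radix__flush_tlb_range(vma, start, end);
	/* hash MMU: fall back to the common hash flush */
	return hash__flush_tlb_range(vma, start, end);
}
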
/arch/metag/include/asm/
tlb.h
12 #define tlb_start_vma(tlb, vma) \ argument
15 flush_cache_range(vma, vma->vm_start, vma->vm_end); \
18 #define tlb_end_vma(tlb, vma) \ argument
21 flush_tlb_range(vma, vma->vm_start, vma->vm_end); \
27 #define tlb_start_vma(tlb, vma) do { } while (0) argument
28 #define tlb_end_vma(tlb, vma) do { } while (0) argument
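
tlb_start_vma()/tlb_end_vma() are the per-VMA brackets that the generic mmu_gather unmap path invokes around page-table teardown; architectures like metag use them to flush the cache before and the TLB after. A hedged sketch of where they sit, loosely following the mm/memory.c unmap path with details elided:

static void demo_unmap_vma(struct mmu_gather *tlb, struct vm_area_struct *vma,
			   unsigned long start, unsigned long end)
{
	tlb_start_vma(tlb, vma);	/* e.g. flush_cache_range() above */
	/* ... walk page tables, clear PTEs, batch pages into tlb ... */
	tlb_end_vma(tlb, vma);		/* e.g. flush_tlb_range() above */
}
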
/arch/arc/include/asm/
tlbflush.h
16 void local_flush_tlb_page(struct vm_area_struct *vma, unsigned long page);
18 void local_flush_tlb_range(struct vm_area_struct *vma,
21 void local_flush_pmd_tlb_range(struct vm_area_struct *vma, unsigned long start,
26 #define flush_tlb_range(vma, s, e) local_flush_tlb_range(vma, s, e) argument
27 #define flush_tlb_page(vma, page) local_flush_tlb_page(vma, page) argument
32 #define flush_pmd_tlb_range(vma, s, e) local_flush_pmd_tlb_range(vma, s, e) argument
35 extern void flush_tlb_range(struct vm_area_struct *vma, unsigned long start,
37 extern void flush_tlb_page(struct vm_area_struct *vma, unsigned long page);
42 extern void flush_pmd_tlb_range(struct vm_area_struct *vma, unsigned long start, unsigned long end);
tlb.h
27 #define tlb_start_vma(tlb, vma) argument
29 #define tlb_start_vma(tlb, vma) \ argument
32 flush_cache_range(vma, vma->vm_start, vma->vm_end); \
36 #define tlb_end_vma(tlb, vma) \ argument
39 flush_tlb_range(vma, vma->vm_start, vma->vm_end); \
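
Both ARC headers follow the usual UP/SMP split: uniprocessor builds alias the public flush API straight to the local_flush_* implementations, while SMP builds declare real cross-CPU functions, typically backed by IPIs. A condensed sketch of the pattern (CONFIG_SMP is the real kernel symbol; bodies omitted):

#ifndef CONFIG_SMP
#define flush_tlb_page(vma, page)  local_flush_tlb_page(vma, page)
#define flush_tlb_range(vma, s, e) local_flush_tlb_range(vma, s, e)
#else	/* SMP: broadcast the flush to other CPUs */
extern void flush_tlb_page(struct vm_area_struct *vma, unsigned long page);
extern void flush_tlb_range(struct vm_area_struct *vma,
			    unsigned long start, unsigned long end);
#endif
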
/arch/xtensa/include/asm/
tlb.h
21 # define tlb_start_vma(tlb,vma) do { } while (0) argument
22 # define tlb_end_vma(tlb,vma) do { } while (0) argument
26 # define tlb_start_vma(tlb, vma) \ argument
29 flush_cache_range(vma, vma->vm_start, vma->vm_end); \
32 # define tlb_end_vma(tlb, vma) \ argument
35 flush_tlb_range(vma, vma->vm_start, vma->vm_end); \
/arch/m32r/include/asm/
cacheflush.h
14 #define flush_cache_range(vma, start, end) do { } while (0) argument
15 #define flush_cache_page(vma, vmaddr, pfn) do { } while (0) argument
22 #define flush_icache_page(vma,pg) _flush_cache_copyback_all() argument
23 #define flush_icache_user_range(vma,pg,adr,len) _flush_cache_copyback_all() argument
28 #define flush_icache_page(vma,pg) smp_flush_cache_all() argument
29 #define flush_icache_user_range(vma,pg,adr,len) smp_flush_cache_all() argument
36 #define flush_cache_range(vma, start, end) do { } while (0) argument
37 #define flush_cache_page(vma, vmaddr, pfn) do { } while (0) argument
43 #define flush_icache_page(vma,pg) _flush_cache_all() argument
44 #define flush_icache_user_range(vma,pg,adr,len) _flush_cache_all() argument
[all …]
tlbflush.h
28 #define flush_tlb_page(vma, page) local_flush_tlb_page(vma, page) argument
29 #define flush_tlb_range(vma, start, end) \ argument
30 local_flush_tlb_range(vma, start, end)
35 #define flush_tlb_page(vma, vmaddr) do { } while (0) argument
36 #define flush_tlb_range(vma, start, end) do { } while (0) argument
47 #define flush_tlb_page(vma, page) smp_flush_tlb_page(vma, page) argument
48 #define flush_tlb_range(vma, start, end) \ argument
49 smp_flush_tlb_range(vma, start, end)
/arch/ia64/include/asm/
fb.h
10 static inline void fb_pgprotect(struct file *file, struct vm_area_struct *vma, in fb_pgprotect() argument
13 if (efi_range_is_wc(vma->vm_start, vma->vm_end - vma->vm_start)) in fb_pgprotect()
14 vma->vm_page_prot = pgprot_writecombine(vma->vm_page_prot); in fb_pgprotect()
16 vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot); in fb_pgprotect()
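
fb_pgprotect() decides the framebuffer mapping attribute per VMA: write-combining when EFI reports the range supports it, strictly uncached otherwise. A hedged sketch of the same decision in a generic mmap handler; demo_range_is_wc() is an invented stand-in for a platform check such as efi_range_is_wc():

static int demo_fb_mmap(struct file *file, struct vm_area_struct *vma)
{
	unsigned long size = vma->vm_end - vma->vm_start;

	if (demo_range_is_wc(vma->vm_start, size))	/* invented helper */
		vma->vm_page_prot = pgprot_writecombine(vma->vm_page_prot);
	else
		vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot);
	/* ... then remap the framebuffer with these protections ... */
	return 0;
}
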
/arch/sparc/include/asm/
tlb_32.h
5 #define tlb_start_vma(tlb, vma) \ argument
7 flush_cache_range(vma, vma->vm_start, vma->vm_end); \
10 #define tlb_end_vma(tlb, vma) \ argument
12 flush_tlb_range(vma, vma->vm_start, vma->vm_end); \
cacheflush_64.h
24 #define flush_cache_range(vma, start, end) \ argument
25 flush_cache_mm((vma)->vm_mm)
26 #define flush_cache_page(vma, page, pfn) \ argument
27 flush_cache_mm((vma)->vm_mm)
51 #define flush_icache_page(vma, pg) do { } while(0) argument
52 #define flush_icache_user_range(vma,pg,adr,len) do { } while (0) argument
58 #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ argument
60 flush_cache_page(vma, vaddr, page_to_pfn(page)); \
62 flush_ptrace_access(vma, page, vaddr, src, len, 0); \
65 #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ argument
[all …]
cacheflush_32.h
13 #define flush_cache_range(vma,start,end) \ argument
14 sparc32_cachetlb_ops->cache_range(vma, start, end)
15 #define flush_cache_page(vma,addr,pfn) \ argument
16 sparc32_cachetlb_ops->cache_page(vma, addr)
18 #define flush_icache_page(vma, pg) do { } while (0) argument
20 #define flush_icache_user_range(vma,pg,adr,len) do { } while (0) argument
22 #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ argument
24 flush_cache_page(vma, vaddr, page_to_pfn(page));\
27 #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ argument
29 flush_cache_page(vma, vaddr, page_to_pfn(page));\
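
copy_to_user_page()/copy_from_user_page() are what access_process_vm() (e.g. ptrace) uses when writing into another process's page through a kernel mapping: copy the bytes, then make the caches seen by the user mapping coherent again. A sketch of the contract, modeled on the asm-generic fallback of this era (demo_ name invented):

#define demo_copy_to_user_page(vma, page, vaddr, dst, src, len)	\
	do {								\
		memcpy(dst, src, len);					\
		/* user mapping may execute this page: sync I-cache */	\
		flush_icache_user_range(vma, page, vaddr, len);		\
	} while (0)
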
/arch/arc/kernel/
arc_hostlink.c
21 static int arc_hl_mmap(struct file *fp, struct vm_area_struct *vma) in arc_hl_mmap() argument
23 vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot); in arc_hl_mmap()
25 if (io_remap_pfn_range(vma, vma->vm_start, vma->vm_pgoff, in arc_hl_mmap()
26 vma->vm_end - vma->vm_start, in arc_hl_mmap()
27 vma->vm_page_prot)) { in arc_hl_mmap()
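
arc_hl_mmap() is the canonical minimal driver mmap handler: mark the whole VMA uncached, then map the physical range selected by the caller's mmap offset in one io_remap_pfn_range() call. A self-contained sketch of the same shape (demo_mmap is an invented name):

static int demo_mmap(struct file *file, struct vm_area_struct *vma)
{
	/* device memory: disable CPU caching for the whole mapping */
	vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot);

	if (io_remap_pfn_range(vma, vma->vm_start, vma->vm_pgoff,
			       vma->vm_end - vma->vm_start,
			       vma->vm_page_prot))
		return -EAGAIN;

	return 0;
}
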
/arch/alpha/include/asm/
cacheflush.h
11 #define flush_cache_range(vma, start, end) do { } while (0) argument
12 #define flush_cache_page(vma, vmaddr, pfn) do { } while (0) argument
51 flush_icache_user_range(struct vm_area_struct *vma, struct page *page, in flush_icache_user_range() argument
54 if (vma->vm_flags & VM_EXEC) { in flush_icache_user_range()
55 struct mm_struct *mm = vma->vm_mm; in flush_icache_user_range()
63 extern void flush_icache_user_range(struct vm_area_struct *vma,
68 #define flush_icache_page(vma, page) \ argument
69 flush_icache_user_range((vma), (page), 0, 0)
71 #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ argument
73 flush_icache_user_range(vma, page, vaddr, len); \
[all …]
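
Alpha's flush_icache_user_range() shows the standard VM_EXEC filter: the instruction cache only needs attention when the mapping can execute code, so non-executable VMAs skip the flush entirely. A sketch of the pattern, with an invented per-arch helper:

static inline void demo_flush_icache_page(struct vm_area_struct *vma,
					  struct page *page)
{
	if (vma->vm_flags & VM_EXEC)		/* executable mapping only */
		demo_arch_icache_flush(page);	/* invented arch helper */
}
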
/arch/metag/mm/
hugetlbpage.c
36 struct vm_area_struct *vma; in prepare_hugepage_range() local
45 vma = find_vma(mm, ALIGN_HUGEPT(addr)); in prepare_hugepage_range()
46 if (vma && !(vma->vm_flags & MAP_HUGETLB)) in prepare_hugepage_range()
49 vma = find_vma(mm, addr); in prepare_hugepage_range()
50 if (vma) { in prepare_hugepage_range()
51 if (addr + len > vma->vm_start) in prepare_hugepage_range()
53 if (!(vma->vm_flags & MAP_HUGETLB) && in prepare_hugepage_range()
54 (ALIGN_HUGEPT(addr + len) > vma->vm_start)) in prepare_hugepage_range()
128 struct vm_area_struct *vma; in hugetlb_get_unmapped_area_existing() local
142 for (vma = find_vma(mm, addr); ; vma = vma->vm_next) { in hugetlb_get_unmapped_area_existing()
[all …]
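
prepare_hugepage_range() leans on the find_vma() contract: find_vma(mm, addr) returns the first VMA with vm_end > addr, which may start entirely above addr, so a containment test must also compare against vm_start (the same `!vma || address < vma->vm_start` check appears in the parisc fault handler below). A minimal sketch of that idiom:

static bool demo_addr_is_mapped(struct mm_struct *mm, unsigned long addr)
{
	struct vm_area_struct *vma = find_vma(mm, addr);

	/* vma == NULL: nothing at or above addr;
	 * addr < vma->vm_start: addr falls in the hole below the VMA. */
	return vma && addr >= vma->vm_start;
}
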
/arch/parisc/include/asm/
tlb.h
10 #define tlb_start_vma(tlb, vma) \ argument
12 flush_cache_range(vma, vma->vm_start, vma->vm_end); \
15 #define tlb_end_vma(tlb, vma) \ argument
17 flush_tlb_range(vma, vma->vm_start, vma->vm_end); \
/arch/powerpc/include/asm/
tlbflush.h
37 extern void flush_tlb_range(struct vm_area_struct *vma, unsigned long start,
42 extern void local_flush_tlb_page(struct vm_area_struct *vma, unsigned long vmaddr);
49 extern void flush_tlb_page(struct vm_area_struct *vma, unsigned long vmaddr);
54 #define flush_tlb_page(vma,addr) local_flush_tlb_page(vma,addr) argument
65 extern void flush_tlb_page(struct vm_area_struct *vma, unsigned long vmaddr);
66 extern void flush_tlb_page_nohash(struct vm_area_struct *vma, unsigned long addr);
67 extern void flush_tlb_range(struct vm_area_struct *vma, unsigned long start,
70 static inline void local_flush_tlb_page(struct vm_area_struct *vma, in local_flush_tlb_page() argument
73 flush_tlb_page(vma, vmaddr); in local_flush_tlb_page()
hugetlb.h
37 static inline void flush_hugetlb_page(struct vm_area_struct *vma, in flush_hugetlb_page() argument
41 return radix__flush_hugetlb_page(vma, vmaddr); in flush_hugetlb_page()
44 static inline void __local_flush_hugetlb_page(struct vm_area_struct *vma, in __local_flush_hugetlb_page() argument
48 return radix__local_flush_hugetlb_page(vma, vmaddr); in __local_flush_hugetlb_page()
111 void book3e_hugetlb_preload(struct vm_area_struct *vma, unsigned long ea,
114 static inline void flush_hugetlb_page(struct vm_area_struct *vma, in flush_hugetlb_page() argument
117 flush_tlb_page(vma, vmaddr); in flush_hugetlb_page()
120 void flush_hugetlb_page(struct vm_area_struct *vma, unsigned long vmaddr);
164 static inline void huge_ptep_clear_flush(struct vm_area_struct *vma, in huge_ptep_clear_flush() argument
168 pte = huge_ptep_get_and_clear(vma->vm_mm, addr, ptep); in huge_ptep_clear_flush()
[all …]
/arch/powerpc/mm/
hugetlbpage-radix.c
12 void radix__flush_hugetlb_page(struct vm_area_struct *vma, unsigned long vmaddr) in radix__flush_hugetlb_page() argument
15 struct hstate *hstate = hstate_file(vma->vm_file); in radix__flush_hugetlb_page()
18 radix__flush_tlb_page_psize(vma->vm_mm, vmaddr, psize); in radix__flush_hugetlb_page()
21 void radix__local_flush_hugetlb_page(struct vm_area_struct *vma, unsigned long vmaddr) in radix__local_flush_hugetlb_page() argument
24 struct hstate *hstate = hstate_file(vma->vm_file); in radix__local_flush_hugetlb_page()
27 radix__local_flush_tlb_page_psize(vma->vm_mm, vmaddr, psize); in radix__local_flush_hugetlb_page()
30 void radix__flush_hugetlb_tlb_range(struct vm_area_struct *vma, unsigned long start, in radix__flush_hugetlb_tlb_range() argument
34 struct hstate *hstate = hstate_file(vma->vm_file); in radix__flush_hugetlb_tlb_range()
37 radix__flush_tlb_range_psize(vma->vm_mm, start, end, psize); in radix__flush_hugetlb_tlb_range()
51 struct vm_area_struct *vma; in radix__hugetlb_get_unmapped_area() local
[all …]
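
All three radix hugetlb flushes start from the same lookup: the huge page size for a mapping lives in the hstate hanging off the backing hugetlbfs file, and that size selects the TLB page-size argument. A sketch of the lookup, using the real hstate_file()/huge_page_size() helpers:

static unsigned long demo_hugepage_size(struct vm_area_struct *vma)
{
	/* hugetlb VMAs are always file-backed by hugetlbfs */
	struct hstate *hstate = hstate_file(vma->vm_file);

	return huge_page_size(hstate);	/* e.g. 2M or 1G */
}
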
/arch/parisc/kernel/
cache.c
79 update_mmu_cache(struct vm_area_struct *vma, unsigned long address, pte_t *ptep) in update_mmu_cache() argument
295 __flush_cache_page(struct vm_area_struct *vma, unsigned long vmaddr, in __flush_cache_page() argument
300 if (vma->vm_flags & VM_EXEC) in __flush_cache_page()
514 struct vm_area_struct *vma; in mm_total_size() local
517 for (vma = mm->mmap; vma; vma = vma->vm_next) in mm_total_size()
518 usize += vma->vm_end - vma->vm_start; in mm_total_size()
539 struct vm_area_struct *vma; in flush_cache_mm() local
553 for (vma = mm->mmap; vma; vma = vma->vm_next) { in flush_cache_mm()
554 flush_user_dcache_range_asm(vma->vm_start, vma->vm_end); in flush_cache_mm()
555 if (vma->vm_flags & VM_EXEC) in flush_cache_mm()
[all …]
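
mm_total_size() and flush_cache_mm() visit every mapping of the process via the singly linked mm->mmap list (this tree predates the VMA iterator and maple tree). A sketch of the walk:

static unsigned long demo_mm_total_size(struct mm_struct *mm)
{
	struct vm_area_struct *vma;
	unsigned long usize = 0;

	/* mm->mmap heads the address-ordered list of VMAs */
	for (vma = mm->mmap; vma; vma = vma->vm_next)
		usize += vma->vm_end - vma->vm_start;

	return usize;
}
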
/arch/arm/mm/
fault-armv.c
40 static int do_adjust_pte(struct vm_area_struct *vma, unsigned long address, in do_adjust_pte() argument
56 flush_cache_page(vma, address, pfn); in do_adjust_pte()
61 set_pte_at(vma->vm_mm, address, ptep, entry); in do_adjust_pte()
62 flush_tlb_page(vma, address); in do_adjust_pte()
92 static int adjust_pte(struct vm_area_struct *vma, unsigned long address, in adjust_pte() argument
102 pgd = pgd_offset(vma->vm_mm, address); in adjust_pte()
119 ptl = pte_lockptr(vma->vm_mm, pmd); in adjust_pte()
123 ret = do_adjust_pte(vma, address, pfn, pte); in adjust_pte()
132 make_coherent(struct address_space *mapping, struct vm_area_struct *vma, in make_coherent() argument
135 struct mm_struct *mm = vma->vm_mm; in make_coherent()
[all …]
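
adjust_pte() translates a user address into its PTE by hand, walking pgd → pud → pmd → pte within vma->vm_mm and taking the PTE lock before do_adjust_pte() rewrites the entry. A hedged sketch of that walk with this tree's pre-p4d API (the caller must pte_unmap() the result):

static pte_t *demo_walk_to_pte(struct mm_struct *mm, unsigned long addr)
{
	pgd_t *pgd = pgd_offset(mm, addr);
	pud_t *pud;
	pmd_t *pmd;

	if (pgd_none_or_clear_bad(pgd))
		return NULL;
	pud = pud_offset(pgd, addr);
	if (pud_none_or_clear_bad(pud))
		return NULL;
	pmd = pmd_offset(pud, addr);
	if (pmd_none_or_clear_bad(pmd))
		return NULL;
	return pte_offset_map(pmd, addr);
}
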
/arch/sh/include/asm/
tlbflush.h
16 extern void local_flush_tlb_range(struct vm_area_struct *vma,
19 extern void local_flush_tlb_page(struct vm_area_struct *vma,
31 extern void flush_tlb_range(struct vm_area_struct *vma, unsigned long start,
33 extern void flush_tlb_page(struct vm_area_struct *vma, unsigned long page);
41 #define flush_tlb_page(vma, page) local_flush_tlb_page(vma, page) argument
44 #define flush_tlb_range(vma, start, end) \ argument
45 local_flush_tlb_range(vma, start, end)
/arch/parisc/mm/
fault.c
235 struct vm_area_struct *vma) in show_signal_msg() argument
249 vma ? ',':'\n'); in show_signal_msg()
251 if (vma) in show_signal_msg()
253 vma->vm_start, vma->vm_end); in show_signal_msg()
261 struct vm_area_struct *vma, *prev_vma; in do_page_fault() local
285 vma = find_vma_prev(mm, address, &prev_vma); in do_page_fault()
286 if (!vma || address < vma->vm_start) in do_page_fault()
295 if ((vma->vm_flags & acc_type) != acc_type) in do_page_fault()
304 fault = handle_mm_fault(vma, address, flags); in do_page_fault()
345 vma = prev_vma; in do_page_fault()
[all …]
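
do_page_fault() shows the common skeleton of every arch fault handler: locate the VMA, reject addresses below vm_start (unless the stack can grow via prev_vma), check the access type against vm_flags, then hand the real work to handle_mm_fault(). A reduced sketch with locking, stack growth, and retry handling omitted:

static int demo_fault(struct mm_struct *mm, unsigned long address,
		      unsigned long acc_type, unsigned int flags)
{
	struct vm_area_struct *vma = find_vma(mm, address);

	if (!vma || address < vma->vm_start)
		return VM_FAULT_SIGSEGV;	/* no mapping covers address */
	if ((vma->vm_flags & acc_type) != acc_type)
		return VM_FAULT_SIGSEGV;	/* e.g. write to read-only VMA */

	return handle_mm_fault(vma, address, flags);	/* 4.x signature */
}
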
/arch/mips/include/asm/
tlbflush.h
18 extern void local_flush_tlb_range(struct vm_area_struct *vma,
22 extern void local_flush_tlb_page(struct vm_area_struct *vma,
30 extern void flush_tlb_range(struct vm_area_struct *vma, unsigned long,
40 #define flush_tlb_range(vma, vmaddr, end) local_flush_tlb_range(vma, vmaddr, end) argument
43 #define flush_tlb_page(vma, page) local_flush_tlb_page(vma, page) argument
/arch/nios2/kernel/
sys_nios2.c
24 struct vm_area_struct *vma; in sys_cacheflush() local
41 vma = find_vma(current->mm, addr); in sys_cacheflush()
42 if (vma == NULL || addr < vma->vm_start || addr + len > vma->vm_end) in sys_cacheflush()
45 flush_cache_range(vma, addr, addr + len); in sys_cacheflush()
/arch/score/include/asm/
cacheflush.h
10 extern void flush_cache_range(struct vm_area_struct *vma,
12 extern void flush_cache_page(struct vm_area_struct *vma,
29 static inline void flush_icache_page(struct vm_area_struct *vma, in flush_icache_page() argument
32 if (vma->vm_flags & VM_EXEC) { in flush_icache_page()
39 #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ argument
42 #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ argument
45 if ((vma->vm_flags & VM_EXEC)) \
46 flush_cache_page(vma, vaddr, page_to_pfn(page));\
