Searched refs:_PAGE_INVALID (Results 1 – 16 of 16) sorted by relevance

/arch/s390/mm/
pgtable.c
94 if (unlikely(pte_val(old) & _PAGE_INVALID)) in ptep_flush_direct()
113 if (unlikely(pte_val(old) & _PAGE_INVALID)) in ptep_flush_lazy()
118 pte_val(*ptep) |= _PAGE_INVALID; in ptep_flush_lazy()
179 if (!mm_uses_skeys(mm) || pte_val(pte) & _PAGE_INVALID) in pgste_update_all()
201 if (!mm_uses_skeys(mm) || pte_val(entry) & _PAGE_INVALID) in pgste_set_key()
203 VM_BUG_ON(!(pte_val(*ptep) & _PAGE_INVALID)); in pgste_set_key()
221 !(pte_val(entry) & _PAGE_INVALID)) { in pgste_set_pte()
272 if (pte_val(old) & _PAGE_INVALID) in ptep_xchg_commit()
274 if (pte_val(new) & _PAGE_INVALID) { in ptep_xchg_commit()
582 pte_val(*ptep) = _PAGE_INVALID; in pgtable_trans_huge_withdraw()
[all …]
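
The ptep_flush_lazy() hits above (file lines 113 and 118) show the recurring s390 pattern: an entry whose _PAGE_INVALID bit is already set needs no hardware flush, and a lazy flush can simply mark the entry invalid in software and defer the real TLB work. A minimal user-space sketch of that check, with pte_t reduced to an unsigned long and the toy_* names invented for illustration (the real function also chooses between this lazy path and an immediate IPTE flush):

    #include <stdio.h>

    #define _PAGE_INVALID 0x400UL      /* HW invalid bit, from pgtable.h line 166 */

    typedef unsigned long pte_t;       /* toy stand-in for the kernel's pte_t */

    /* Sketch of the ptep_flush_lazy() idea: skip entries that are already
     * invalid, otherwise mark the entry invalid in place. */
    static pte_t toy_flush_lazy(pte_t *ptep)
    {
        pte_t old = *ptep;

        if (old & _PAGE_INVALID)       /* already invalid: nothing to flush */
            return old;
        *ptep |= _PAGE_INVALID;        /* lazily invalidate the entry */
        return old;
    }

    int main(void)
    {
        pte_t pte = 0x12345000UL;      /* arbitrary valid-looking entry */

        toy_flush_lazy(&pte);
        printf("invalid: %s\n", (pte & _PAGE_INVALID) ? "yes" : "no");
        return 0;
    }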
dump_pagetables.c
100 if (pr & _PAGE_INVALID) { in print_prot()
113 if (st->current_prot & _PAGE_INVALID) in note_prot_wx()
141 if (level == 4 && (val & _PAGE_INVALID)) in note_page()
142 prot = _PAGE_INVALID; in note_page()
145 prot = _PAGE_INVALID; in note_page()
hugetlbpage.c
54 rste |= move_set_bit(pte_val(pte), _PAGE_INVALID, in __pte_to_rste()
111 _PAGE_INVALID); in __rste_to_pte()
125 pte_val(pte) = _PAGE_INVALID; in __rste_to_pte()
pgalloc.c
165 memset64(table, _PAGE_INVALID, PTRS_PER_PTE); in page_table_alloc_pgste()
224 memset64((u64 *)table, _PAGE_INVALID, PTRS_PER_PTE); in page_table_alloc()
229 memset64((u64 *)table, _PAGE_INVALID, 2 * PTRS_PER_PTE); in page_table_alloc()
333 memset64(table, _PAGE_INVALID, PTRS_PER_PTE); in base_pgt_alloc()
kasan_init.c
79 memset64((u64 *)pte, _PAGE_INVALID, PTRS_PER_PTE); in kasan_early_pte_alloc()
pageattr.c
361 pte_val(*pte) &= ~_PAGE_INVALID; in __kernel_map_pages()
vmem.c
63 memset64((u64 *)pte, _PAGE_INVALID, PTRS_PER_PTE); in vmem_pte_alloc()
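
pgalloc.c, kasan_init.c and vmem.c all initialize a freshly allocated page table the same way: every slot is filled with _PAGE_INVALID via memset64(), so an empty entry is never a valid translation. A rough user-space equivalent is sketched below; PTRS_PER_PTE is 256 on s390, and the toy_* names are invented:

    #include <stdint.h>
    #include <stdlib.h>

    #define _PAGE_INVALID 0x400UL
    #define PTRS_PER_PTE  256          /* entries per s390 page table */

    /* Sketch of the allocation pattern above: a new page table starts out
     * with every entry set to _PAGE_INVALID (the kernel uses memset64()). */
    static uint64_t *toy_page_table_alloc(void)
    {
        uint64_t *table = malloc(PTRS_PER_PTE * sizeof(*table));

        if (!table)
            return NULL;
        for (int i = 0; i < PTRS_PER_PTE; i++)
            table[i] = _PAGE_INVALID;
        return table;
    }

    int main(void)
    {
        uint64_t *table = toy_page_table_alloc();

        free(table);
        return 0;
    }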
gmap.c
1298 if (!table || *table & _PAGE_INVALID) in gmap_unshadow_page()
1319 pgt[i] = _PAGE_INVALID; in __gmap_unshadow_pgt()
2542 ptep_xchg_direct(walk->mm, addr, ptep, __pte(_PAGE_INVALID)); in __zap_zero_pages()
/arch/s390/include/asm/
pgtable.h
166 #define _PAGE_INVALID 0x400 /* HW invalid bit */ macro
400 #define PAGE_NONE __pgprot(_PAGE_PRESENT | _PAGE_INVALID | _PAGE_PROTECT)
402 _PAGE_NOEXEC | _PAGE_INVALID | _PAGE_PROTECT)
404 _PAGE_INVALID | _PAGE_PROTECT)
406 _PAGE_NOEXEC | _PAGE_INVALID | _PAGE_PROTECT)
408 _PAGE_INVALID | _PAGE_PROTECT)
765 return pte_val(pte) == _PAGE_INVALID; in pte_none()
909 pte_val(*ptep) = _PAGE_INVALID; in pte_clear()
925 pte_val(pte) &= ~_PAGE_INVALID; in pte_modify()
968 pte_val(pte) |= _PAGE_INVALID; in pte_mkold()
[all …]
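
The header hits above tie the s390 semantics together: pte_none() is true only when the entry is exactly _PAGE_INVALID and nothing else (line 765), pte_clear() writes exactly that value back (line 909), and PAGE_NONE combines _PAGE_INVALID with the software present bit so the mapping stays known to the kernel while being invalid to the hardware (line 400). A compilable sketch of those relationships follows; the _PAGE_PRESENT and _PAGE_PROTECT values are quoted from the kernel header rather than from the hits above, and the toy_* names are invented:

    #include <assert.h>
    #include <stdbool.h>

    #define _PAGE_PRESENT 0x001UL      /* SW pte present bit */
    #define _PAGE_PROTECT 0x200UL      /* HW read-only bit */
    #define _PAGE_INVALID 0x400UL      /* HW invalid bit */

    typedef unsigned long pte_t;

    /* An empty slot carries exactly _PAGE_INVALID and no other bits ... */
    static bool toy_pte_none(pte_t pte)
    {
        return pte == _PAGE_INVALID;
    }

    /* ... which is also what clearing an entry writes back. */
    static void toy_pte_clear(pte_t *ptep)
    {
        *ptep = _PAGE_INVALID;
    }

    /* PAGE_NONE keeps the software present bit, so the entry is invalid for
     * the hardware but not "none" for the kernel. */
    #define TOY_PAGE_NONE (_PAGE_PRESENT | _PAGE_INVALID | _PAGE_PROTECT)

    int main(void)
    {
        pte_t pte = TOY_PAGE_NONE;

        assert(!toy_pte_none(pte));    /* PAGE_NONE is not an empty entry */
        toy_pte_clear(&pte);
        assert(toy_pte_none(pte));
        return 0;
    }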
/arch/powerpc/mm/ptdump/
book3s64.c
44 .mask = _PAGE_PRESENT | _PAGE_INVALID,
/arch/powerpc/include/asm/book3s/64/
pgtable.h
55 #define _PAGE_INVALID _RPAGE_SW0 macro
602 return (pte_raw(pte) & cpu_to_be64(_PAGE_INVALID | _PAGE_PTE)) == in pte_present()
603 cpu_to_be64(_PAGE_INVALID | _PAGE_PTE); in pte_present()
925 if (pmd_raw(pmd) & cpu_to_be64(_PAGE_PRESENT | _PAGE_INVALID)) in pmd_present()
941 if ((pmd_raw(pmd) & cpu_to_be64(_PAGE_PRESENT | _PAGE_INVALID)) == in pmd_is_serializing()
942 cpu_to_be64(_PAGE_INVALID)) in pmd_is_serializing()
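
On book3s64 the bit is purely a software flag (_RPAGE_SW0, line 55) marking an entry that has been temporarily invalidated but is still logically present; pte_present() and pmd_present() therefore accept _PAGE_INVALID as an alternative to _PAGE_PRESENT (lines 602-603 and 925). A sketch of that predicate with illustrative bit values (not the kernel's) and invented toy_* names:

    #include <stdbool.h>
    #include <stdint.h>

    /* Illustrative bit positions only; in the kernel _PAGE_INVALID is the
     * software bit _RPAGE_SW0 and the real values differ. */
    #define TOY_PAGE_PRESENT 0x1UL
    #define TOY_PAGE_INVALID 0x2UL
    #define TOY_PAGE_PTE     0x4UL

    typedef uint64_t pte_t;

    /* Present means either _PAGE_PRESENT, or a pte that was temporarily
     * invalidated (_PAGE_INVALID set) while still carrying _PAGE_PTE. */
    static bool toy_pte_present(pte_t pte)
    {
        if (pte & TOY_PAGE_PRESENT)
            return true;
        return (pte & (TOY_PAGE_INVALID | TOY_PAGE_PTE)) ==
               (TOY_PAGE_INVALID | TOY_PAGE_PTE);
    }

    int main(void)
    {
        return toy_pte_present(TOY_PAGE_PTE | TOY_PAGE_INVALID) ? 0 : 1;
    }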
/arch/powerpc/mm/book3s64/
hash_hugetlbpage.c
136 _PAGE_PRESENT, _PAGE_INVALID, 1); in huge_ptep_modify_prot_start()
pgtable.c
118 old_pmd = pmd_hugepage_update(vma->vm_mm, address, pmdp, _PAGE_PRESENT, _PAGE_INVALID); in pmdp_invalidate()
447 pte_val = pte_update(vma->vm_mm, addr, ptep, _PAGE_PRESENT, _PAGE_INVALID, 0); in ptep_modify_prot_start()
radix_pgtable.c
1069 old_pte = __radix_pte_update(ptep, _PAGE_PRESENT, _PAGE_INVALID); in radix__ptep_set_access_flags()
1075 __radix_pte_update(ptep, _PAGE_INVALID, new_pte); in radix__ptep_set_access_flags()
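
radix__ptep_set_access_flags(), ptep_modify_prot_start() and pmdp_invalidate() above all rely on the same two-step update: atomically clear _PAGE_PRESENT while setting _PAGE_INVALID, do the flushing in between, then install the new bits and drop _PAGE_INVALID again (lines 1069 and 1075). A toy sequential sketch of that sequence, with no atomics, illustrative bit values, and invented toy_* names:

    #include <stdint.h>

    #define TOY_PAGE_PRESENT 0x1UL     /* illustrative values, not the kernel's */
    #define TOY_PAGE_INVALID 0x2UL

    typedef uint64_t pte_t;

    /* Clear the bits in 'clr', set the bits in 'set', return the old value
     * (the kernel performs this as an atomic update). */
    static pte_t toy_pte_update(pte_t *ptep, pte_t clr, pte_t set)
    {
        pte_t old = *ptep;

        *ptep = (old & ~clr) | set;
        return old;
    }

    static void toy_set_access_flags(pte_t *ptep, pte_t new_bits)
    {
        /* Step 1: clear _PAGE_PRESENT but set _PAGE_INVALID, so generic code
         * still treats the pte as present while hardware walks fault. */
        pte_t old = toy_pte_update(ptep, TOY_PAGE_PRESENT, TOY_PAGE_INVALID);

        /* ... the kernel flushes the TLB at this point ... */

        /* Step 2: install the new bits and drop _PAGE_INVALID again. */
        toy_pte_update(ptep, TOY_PAGE_INVALID, old | new_bits);
    }

    int main(void)
    {
        pte_t pte = TOY_PAGE_PRESENT;

        toy_set_access_flags(&pte, 0x10UL);   /* e.g. an accessed/dirty bit */
        return (pte & TOY_PAGE_INVALID) ? 1 : 0;
    }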
/arch/s390/kernel/
uv.c
172 if (pte_val(entry) & _PAGE_INVALID) in make_secure_pte()
/arch/s390/kvm/
priv.c
1176 if (ptev & _PAGE_INVALID) { in __do_essa()