/arch/arm64/mm/context.c

     38  #define asid2idx(asid) ((asid) & ~ASID_MASK)
     44  u32 asid;                                          in get_cpu_asid_bits()
     54  asid = 8;                                          in get_cpu_asid_bits()
     57  asid = 16;                                         in get_cpu_asid_bits()
     60  return asid;                                       in get_cpu_asid_bits()
     66  u32 asid = get_cpu_asid_bits();                    in verify_cpu_asid_bits()
     68  if (asid < asid_bits) {                            in verify_cpu_asid_bits()
     74  smp_processor_id(), asid, asid_bits);              in verify_cpu_asid_bits()
    101  #define asid_gen_match(asid) \
    102      (!(((asid) ^ atomic64_read(&asid_generation)) >> asid_bits))
    [all …]

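The asid_gen_match() macro at lines 101-102 is the core of arm64's rollover
scheme: each mm's 64-bit ASID value carries a generation number in its upper
bits, and the value stays usable only while those bits match the global
asid_generation. A minimal standalone sketch of that check (the 16-bit width
and all names here are assumptions; arm64 probes 8 or 16 ASID bits at boot,
as get_cpu_asid_bits() above shows):

    /* Generation-tagged ASID check -- a sketch, not the kernel's code. */
    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define ASID_BITS 16                 /* assumed; probed at boot on arm64 */

    static _Atomic uint64_t asid_generation = 1ULL << ASID_BITS;

    static bool asid_gen_match(uint64_t asid)
    {
        /* XOR cancels the generation bits only if they are current. */
        return !((asid ^ atomic_load(&asid_generation)) >> ASID_BITS);
    }

    int main(void)
    {
        uint64_t asid = (1ULL << ASID_BITS) | 42;  /* generation 1, hw ASID 42 */
        printf("%d\n", asid_gen_match(asid));      /* 1: still current */
        atomic_fetch_add(&asid_generation, 1ULL << ASID_BITS);  /* rollover */
        printf("%d\n", asid_gen_match(asid));      /* 0: must reallocate */
        return 0;
    }

Keeping the generation in the same word as the ASID is what lets this check
be a single XOR and shift, with no lock on the context-switch fast path.
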
/arch/arm/mm/context.c

     56  u64 context_id, asid;                              in a15_erratum_get_cpumask()
     67  asid = per_cpu(active_asids, cpu).counter;         in a15_erratum_get_cpumask()
     68  if (asid == 0)                                     in a15_erratum_get_cpumask()
     69  asid = per_cpu(reserved_asids, cpu);               in a15_erratum_get_cpumask()
     70  if (context_id == asid)                            in a15_erratum_get_cpumask()
    139  u64 asid;                                          in flush_context()
    144  asid = atomic64_xchg(&per_cpu(active_asids, i), 0);  in flush_context()
    152  if (asid == 0)                                     in flush_context()
    153  asid = per_cpu(reserved_asids, i);                 in flush_context()
    154  __set_bit(asid & ~ASID_MASK, asid_map);            in flush_context()
    [all …]

/arch/csky/mm/asid.c

     21  #define asid2idx(info, asid) (((asid) & ~ASID_MASK(info)) >> (info)->ctxt_shift)
     27  u64 asid;                                          in flush_context()
     33  asid = atomic64_xchg_relaxed(&active_asid(info, i), 0);  in flush_context()
     41  if (asid == 0)                                     in flush_context()
     42  asid = reserved_asid(info, i);                     in flush_context()
     43  __set_bit(asid2idx(info, asid), info->map);        in flush_context()
     44  reserved_asid(info, i) = asid;                     in flush_context()
     54  static bool check_update_reserved_asid(struct asid_info *info, u64 asid,  in check_update_reserved_asid()
     70  if (reserved_asid(info, cpu) == asid) {            in check_update_reserved_asid()
     83  u64 asid = atomic64_read(pasid);                   in new_context()
    [all …]

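csky carries a generic version of the allocator that arm and arm64 use: on
generation rollover, flush_context() atomically steals each CPU's active ASID
(the xchg at line 33), falls back to that CPU's last reserved ASID if the slot
was already zeroed, and re-marks the result in the allocation bitmap so tasks
that are currently running keep their numbers. A simplified standalone sketch
of that walk (the sizes, the plain-array "bitmap", and the index mapping are
assumptions):

    /* Rollover walk in the generic-ASID-allocator style -- a sketch. */
    #include <stdatomic.h>
    #include <stdint.h>
    #include <string.h>

    #define NR_CPUS   4
    #define NUM_ASIDS 256

    static _Atomic uint64_t active_asid[NR_CPUS];
    static uint64_t reserved_asid[NR_CPUS];
    static unsigned char asid_map[NUM_ASIDS];   /* stand-in for a bitmap */

    static unsigned int asid2idx(uint64_t asid)
    {
        return asid % NUM_ASIDS;                /* keep the hardware bits */
    }

    static void flush_context(void)
    {
        memset(asid_map, 0, sizeof(asid_map));
        for (int i = 0; i < NR_CPUS; i++) {
            /* Steal the CPU's active ASID; 0 means a concurrent
             * rollover already took it, so use the reserved copy. */
            uint64_t asid = atomic_exchange(&active_asid[i], 0);
            if (asid == 0)
                asid = reserved_asid[i];
            asid_map[asid2idx(asid)] = 1;
            reserved_asid[i] = asid;
        }
        /* the real allocator flushes the TLBs before reusing anything */
    }

    int main(void)
    {
        active_asid[0] = 7;            /* CPU 0 is running with ASID 7 */
        flush_context();
        return asid_map[7] ? 0 : 1;    /* ASID 7 survives the rollover */
    }
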
/arch/sh/include/asm/mmu_context_32.h

     15  static inline void set_asid(unsigned long asid)
     17  __raw_writel(asid, MMU_PTEAEX);                    in set_asid()
     25  static inline void set_asid(unsigned long asid)
     34  : "r" (asid), "m" (__m(MMU_PTEH)),                 in set_asid()
     40  unsigned long asid;                                in get_asid()
     43  : "=r" (asid)                                      in get_asid()
     45  asid &= MMU_CONTEXT_ASID_MASK;                     in get_asid()
     46  return asid;                                       in get_asid()

/arch/sh/include/asm/mmu_context.h

     57  unsigned long asid = asid_cache(cpu);              in get_mmu_context()
     60  if (((cpu_context(cpu, mm) ^ asid) & MMU_CONTEXT_VERSION_MASK) == 0)  in get_mmu_context()
     65  if (!(++asid & MMU_CONTEXT_ASID_MASK)) {           in get_mmu_context()
     76  if (!asid)                                         in get_mmu_context()
     77  asid = MMU_CONTEXT_FIRST_VERSION;                  in get_mmu_context()
     80  cpu_context(cpu, mm) = asid_cache(cpu) = asid;     in get_mmu_context()
    129  #define set_asid(asid) do { } while (0)
    132  #define switch_and_save_asid(asid) (0)

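get_mmu_context() above shows the older MIPS-style scheme SH uses: one per-CPU
counter whose low bits are the hardware ASID and whose high bits are a version
number. While the versions match, the mm keeps its cached value; otherwise the
counter is bumped, the TLB is flushed when the ASID field wraps (the carry
bumps the version), and a wrap of the entire counter restarts at the first
version. A condensed standalone sketch (the 8-bit mask, the flush hook, and
the names are assumptions):

    /* Version+ASID counter scheme in the SH/MIPS style -- a sketch. */
    #include <stdint.h>
    #include <stdio.h>

    #define ASID_MASK     0xffUL             /* low 8 bits: hardware ASID */
    #define VERSION_MASK  (~ASID_MASK)       /* high bits: version number */
    #define FIRST_VERSION (ASID_MASK + 1)

    static unsigned long asid_cache = FIRST_VERSION;

    static void local_flush_tlb_all(void) { /* flush the hardware TLB */ }

    static unsigned long get_mmu_context(unsigned long mm_context)
    {
        unsigned long asid = asid_cache;

        /* Same version: the mm's cached ASID is still valid here. */
        if (((mm_context ^ asid) & VERSION_MASK) == 0)
            return mm_context;

        if (!(++asid & ASID_MASK)) {
            /* ASID field wrapped: stale entries may alias, flush. */
            local_flush_tlb_all();
            if (!asid)                   /* the whole counter wrapped */
                asid = FIRST_VERSION;
        }
        return asid_cache = asid;
    }

    int main(void)
    {
        unsigned long ctx = get_mmu_context(0);   /* fresh mm */
        printf("asid=%lu version=%lu\n", ctx & ASID_MASK, ctx >> 8);
        return 0;
    }
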
/arch/sh/include/asm/tlbflush.h

     23  extern void local_flush_tlb_one(unsigned long asid, unsigned long page);
     35  extern void flush_tlb_one(unsigned long asid, unsigned long page);
     42  #define flush_tlb_one(asid, page) local_flush_tlb_one(asid, page)

/arch/xtensa/include/asm/mmu_context.h

     72  unsigned long asid = cpu_asid_cache(cpu);          in get_new_mmu_context()
     73  if ((++asid & ASID_MASK) == 0) {                   in get_new_mmu_context()
     79  asid += ASID_USER_FIRST;                           in get_new_mmu_context()
     81  cpu_asid_cache(cpu) = asid;                        in get_new_mmu_context()
     82  mm->context.asid[cpu] = asid;                      in get_new_mmu_context()
     93  unsigned long asid = mm->context.asid[cpu];        in get_mmu_context()
     95  if (asid == NO_CONTEXT ||                          in get_mmu_context()
     96  ((asid ^ cpu_asid_cache(cpu)) & ~ASID_MASK))       in get_mmu_context()
    104  set_rasid_register(ASID_INSERT(mm->context.asid[cpu]));  in activate_context()
    119  mm->context.asid[cpu] = NO_CONTEXT;                in init_new_context()

/arch/xtensa/include/asm/mmu.h

     17  unsigned long asid[NR_CPUS];

/arch/sh/mm/tlbflush_32.c

     21  unsigned long asid;                                in local_flush_tlb_page()
     24  asid = cpu_asid(cpu, vma->vm_mm);                  in local_flush_tlb_page()
     30  set_asid(asid);                                    in local_flush_tlb_page()
     32  local_flush_tlb_one(asid, page);                   in local_flush_tlb_page()
     56  unsigned long asid;                                in local_flush_tlb_range()
     59  asid = cpu_asid(cpu, mm);                          in local_flush_tlb_range()
     65  set_asid(asid);                                    in local_flush_tlb_range()
     68  local_flush_tlb_one(asid, start);                  in local_flush_tlb_range()
     89  unsigned long asid;                                in local_flush_tlb_kernel_range()
     92  asid = cpu_asid(cpu, &init_mm);                    in local_flush_tlb_kernel_range()
    [all …]

/arch/sh/mm/tlb-pteaex.c

     70  void local_flush_tlb_one(unsigned long asid, unsigned long page)
     74  __raw_writel(asid, MMU_UTLB_ADDRESS_ARRAY2 | MMU_PAGE_ASSOC_BIT);  in local_flush_tlb_one()
     76  __raw_writel(asid, MMU_ITLB_ADDRESS_ARRAY2 | MMU_PAGE_ASSOC_BIT);  in local_flush_tlb_one()

/arch/sh/mm/tlb-sh3.c

     55  void local_flush_tlb_one(unsigned long asid, unsigned long page)
     67  data = (page & 0xfffe0000) | asid;  /* VALID bit is off */  in local_flush_tlb_one()

/arch/sh/mm/tlb-debugfs.c

     94  unsigned long vpn, ppn, asid, size;                in tlb_seq_show()
    107  asid = val & MMU_CONTEXT_ASID_MASK;                in tlb_seq_show()
    126  entry, vpn, ppn, asid,                             in tlb_seq_show()

/arch/sh/mm/tlb-sh4.c

     65  void local_flush_tlb_one(unsigned long asid, unsigned long page)
     76  data = page | asid;  /* VALID bit is off */        in local_flush_tlb_one()

/arch/arm64/include/asm/tlbflush.h

     57  #define __TLBI_VADDR(addr, asid) \
     61      __ta |= (unsigned long)(asid) << 48; \
    136  #define __TLBI_VADDR_RANGE(addr, asid, scale, num, ttl) \
    144      __ta |= (unsigned long)(asid) << 48; \
    248  unsigned long asid;                                in flush_tlb_mm()
    251  asid = __TLBI_VADDR(0, ASID(mm));                  in flush_tlb_mm()
    252  __tlbi(aside1is, asid);                            in flush_tlb_mm()
    253  __tlbi_user(aside1is, asid);                       in flush_tlb_mm()
    288  unsigned long asid, addr, pages;                   in __flush_tlb_range()
    308  asid = ASID(vma->vm_mm);                           in __flush_tlb_range()
    [all …]

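Both macros pack the same operand format: the page-shifted virtual address in
the low bits and the 16-bit ASID in bits 63:48, which is the layout the
TLBI-by-VA instructions consume. A small sketch of the packing (the helper
name and the usage are invented; the 44-bit address field mirrors the masking
__TLBI_VADDR performs):

    /* Build a TLBI-by-VA operand: page number low, ASID in bits 63:48. */
    #include <stdint.h>
    #include <stdio.h>

    static uint64_t tlbi_vaddr(uint64_t addr, uint16_t asid)
    {
        uint64_t ta = (addr >> 12) & ((1ULL << 44) - 1);  /* page number */
        ta |= (uint64_t)asid << 48;                       /* ASID tag    */
        return ta;
    }

    int main(void)
    {
        /* Operand to invalidate one page for ASID 42; the kernel would
         * feed this value to a "tlbi vae1is" instruction. */
        printf("%#llx\n", (unsigned long long)tlbi_vaddr(0xffff8000ULL, 42));
        return 0;
    }

Because the ASID rides along in the operand, flush_tlb_mm() can invalidate a
whole address space (aside1is) without disturbing entries of other ASIDs.
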
/arch/x86/mm/tlb.c

    103  static inline u16 kern_pcid(u16 asid)
    105  VM_WARN_ON_ONCE(asid > MAX_ASID_AVAILABLE);        in kern_pcid()
    118  VM_WARN_ON_ONCE(asid & (1 << X86_CR3_PTI_PCID_USER_BIT));  in kern_pcid()
    133  return asid + 1;                                   in kern_pcid()
    139  static inline u16 user_pcid(u16 asid)
    141  u16 ret = kern_pcid(asid);                         in user_pcid()
    148  static inline unsigned long build_cr3(pgd_t *pgd, u16 asid)
    151  return __sme_pa(pgd) | kern_pcid(asid);            in build_cr3()
    153  VM_WARN_ON_ONCE(asid != 0);                        in build_cr3()
    158  static inline unsigned long build_cr3_noflush(pgd_t *pgd, u16 asid)
    [all …]

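x86 has no ASID field in its page-table walk; the low 12 bits of CR3 carry a
PCID instead. kern_pcid() maps the kernel's small ASID space onto PCIDs as
asid + 1 so that PCID 0 keeps meaning "no PCID", and with page-table isolation
user_pcid() marks the user half of an address space by setting a high PCID
bit; build_cr3() then ORs the result into the CR3 value. A sketch of that
mapping (the bit position follows X86_CR3_PTI_PCID_USER_BIT; the limit macro
and the bodies are simplified stand-ins):

    /* ASID -> PCID mapping in the kern_pcid()/user_pcid() style. */
    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    #define CR3_PTI_USER_BIT 11                 /* PTI "user half" bit */
    #define MAX_AVAIL_ASID   ((1 << CR3_PTI_USER_BIT) - 2)

    static uint16_t kern_pcid(uint16_t asid)
    {
        assert(asid <= MAX_AVAIL_ASID);
        return asid + 1;          /* PCID 0 stays "PCID disabled" */
    }

    static uint16_t user_pcid(uint16_t asid)
    {
        return kern_pcid(asid) | (1 << CR3_PTI_USER_BIT);
    }

    int main(void)
    {
        printf("kern=%u user=%u\n", kern_pcid(0), user_pcid(0)); /* 1 2049 */
        return 0;
    }
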
/arch/mips/lib/r3k_dump_tlb.c

     31  unsigned int asid;                                 in dump_tlb()
     35  asid = read_c0_entryhi() & asid_mask;              in dump_tlb()
     50  (entryhi & asid_mask) == asid))                    in dump_tlb()
     69  write_c0_entryhi(asid);                            in dump_tlb()

/arch/mips/lib/dump_tlb.c

     76  unsigned long s_entryhi, entryhi, asid, mmid;      in dump_tlb()
     98  asid = s_mmid = read_c0_memorymapid();             in dump_tlb()
    100  asid = s_entryhi & asidmask;                       in dump_tlb()
    139  if (!((entrylo0 | entrylo1) & ENTRYLO_G) && (mmid != asid))  in dump_tlb()

/arch/csky/include/asm/asid.h

     46  u64 asid, old_active_asid;                         in asid_check_context()
     48  asid = atomic64_read(pasid);                       in asid_check_context()
     66  !((asid ^ atomic64_read(&info->generation)) >> info->bits) &&  in asid_check_context()
     68  old_active_asid, asid))                            in asid_check_context()

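asid_check_context() is the lock-free fast path that pairs with the rollover
flush in /arch/csky/mm/asid.c above: if the mm's ASID belongs to the current
generation and a compare-and-swap can re-install it in this CPU's active slot
(which a concurrent rollover may have zeroed), no lock is taken; any failure
drops to a slow path that allocates under a spinlock. A sketch of just that
test (the generation width is assumed and the retry/slow path is omitted):

    /* Lock-free ASID fast path, after asid_check_context() -- a sketch. */
    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define GEN_SHIFT 16    /* low bits: hw ASID; high bits: generation */

    static _Atomic uint64_t generation = 1ULL << GEN_SHIFT;

    static bool asid_fast_path(_Atomic uint64_t *active_slot, uint64_t asid)
    {
        uint64_t old_active = atomic_load(active_slot);

        /* Succeed only if (a) the slot was not zeroed by a concurrent
         * rollover, (b) the ASID's generation is current, and (c) we
         * install the ASID before anyone else changes the slot. */
        return old_active &&
               !((asid ^ atomic_load(&generation)) >> GEN_SHIFT) &&
               atomic_compare_exchange_strong(active_slot, &old_active, asid);
    }

On success the context switch never touches the allocator's spinlock, which
is the point of keeping per-CPU active slots in the first place.
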
/arch/csky/include/asm/mmu_context.h

     24  #define cpu_asid(mm) (atomic64_read(&mm->context.asid) & ASID_MASK)
     26  #define init_new_context(tsk,mm) ({ atomic64_set(&(mm)->context.asid, 0); 0; })
     45  write_mmu_entryhi(next->context.asid.counter);     in switch_mm()

/arch/csky/include/asm/mmu.h

      8  atomic64_t asid;

/arch/xtensa/mm/tlb.c

     70  mm->context.asid[cpu] = NO_CONTEXT;                in local_flush_tlb_mm()
     74  mm->context.asid[cpu] = NO_CONTEXT;                in local_flush_tlb_mm()
     95  if (mm->context.asid[cpu] == NO_CONTEXT)           in local_flush_tlb_range()
     99  (unsigned long)mm->context.asid[cpu], start, end);  in local_flush_tlb_range()
    105  set_rasid_register(ASID_INSERT(mm->context.asid[cpu]));  in local_flush_tlb_range()
    133  if (mm->context.asid[cpu] == NO_CONTEXT)           in local_flush_tlb_page()
    139  set_rasid_register(ASID_INSERT(mm->context.asid[cpu]));  in local_flush_tlb_page()

/arch/mips/dec/kn01-berr.c

     79  long asid, entryhi, vaddr;                         in dec_kn01_be_backend()
    109  asid = read_c0_entryhi();                          in dec_kn01_be_backend()
    110  entryhi = asid & (PAGE_SIZE - 1);                  in dec_kn01_be_backend()
    118  write_c0_entryhi(asid);                            in dec_kn01_be_backend()

/arch/mips/mm/context.c

     24  u64 asid;                                          in get_new_mmu_context()
     34  asid = asid_cache(cpu);                            in get_new_mmu_context()
     36  if (!((asid += cpu_asid_inc()) & cpu_asid_mask(&cpu_data[cpu]))) {  in get_new_mmu_context()
     42  set_cpu_context(cpu, mm, asid);                    in get_new_mmu_context()
     43  asid_cache(cpu) = asid;                            in get_new_mmu_context()

/arch/x86/kvm/svm/sev.c

    112  return sev->asid;                                  in sev_get_asid()
    115  static void sev_asid_free(int asid)
    122  pos = asid - 1;                                    in sev_asid_free()
    127  sd->sev_vmcbs[asid] = NULL;                        in sev_asid_free()
    177  int asid, ret;                                     in sev_guest_init()
    186  asid = sev_asid_new();                             in sev_guest_init()
    187  if (asid < 0)                                      in sev_guest_init()
    195  sev->asid = asid;                                  in sev_guest_init()
    201  sev_asid_free(asid);                               in sev_guest_init()
    208  int asid = sev_get_asid(kvm);                      in sev_bind_asid()
    [all …]

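SEV ASIDs tag encrypted guest memory in hardware, so KVM manages them as a
small global pool: sev_guest_init() allocates one, records it in the per-VM
state, and frees it on any later failure; the pos = asid - 1 in
sev_asid_free() reflects that ASID 0 is invalid, so the bitmap is indexed
from ASID 1. A sketch of that allocate/bind/free lifecycle (the pool size,
the plain-array "bitmap", and the bodies are invented; only the function
names follow the listing):

    /* Small-global-pool ASID lifecycle in the SEV style -- a sketch. */
    #include <stdbool.h>
    #include <stdio.h>

    #define MIN_ASID 1               /* ASID 0 is invalid for SEV guests */
    #define MAX_ASID 16              /* invented pool size */

    static bool asid_in_use[MAX_ASID + 1];

    static int sev_asid_new(void)
    {
        for (int asid = MIN_ASID; asid <= MAX_ASID; asid++) {
            if (!asid_in_use[asid]) {
                asid_in_use[asid] = true;
                return asid;
            }
        }
        return -1;                   /* pool exhausted */
    }

    static void sev_asid_free(int asid)
    {
        asid_in_use[asid] = false;
    }

    int main(void)
    {
        int asid = sev_asid_new();
        if (asid < 0)
            return 1;
        printf("guest bound to ASID %d\n", asid);
        sev_asid_free(asid);         /* VM torn down, ASID reusable */
        return 0;
    }
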
/arch/riscv/include/asm/sbi.h

    112  unsigned long asid);
    126  unsigned long asid);