
Searched refs:asid (Results 1 – 25 of 36) sorted by relevance


/arch/arm/mm/
context.c:33 unsigned int asid; in __new_context() local
36 asid = ++cpu_last_asid; in __new_context()
37 if (asid == 0) in __new_context()
38 asid = cpu_last_asid = ASID_FIRST_VERSION; in __new_context()
44 if (unlikely((asid & ~ASID_MASK) == 0)) { in __new_context()
45 asid = ++cpu_last_asid; in __new_context()
63 mm->context.id = asid; in __new_context()
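The __new_context() hits above are the classic generational ASID allocator: the hardware ASID sits in the low bits of a single counter and a software "version" in the bits above, so a wrap of the hardware field can be detected (and the TLB flushed) without per-mm bookkeeping. A minimal sketch of the pattern, with hypothetical field sizes and the flush and locking omitted:

    /* Hypothetical layout: hardware ASID in the low 8 bits, version above. */
    #define ASID_BITS          8
    #define ASID_MASK          (~0u << ASID_BITS)   /* version bits */
    #define ASID_FIRST_VERSION (1u << ASID_BITS)

    static unsigned int cpu_last_asid = ASID_FIRST_VERSION;

    static unsigned int new_context(void)
    {
        unsigned int asid = ++cpu_last_asid;

        if (asid == 0)                    /* 32-bit counter overflowed */
            asid = cpu_last_asid = ASID_FIRST_VERSION;

        if ((asid & ~ASID_MASK) == 0)     /* hardware ASID wrapped: a real */
            asid = ++cpu_last_asid;       /* kernel flushes the TLB, skips 0 */

        return asid;
    }

Because the counter only ever increments, bumping past the 8-bit field advances the version bits automatically; stale mm contexts then fail the version compare on their next switch-in.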
/arch/sh/mm/
tlbflush_32.c:21 unsigned long asid; in local_flush_tlb_page() local
24 asid = cpu_asid(cpu, vma->vm_mm); in local_flush_tlb_page()
30 set_asid(asid); in local_flush_tlb_page()
32 local_flush_tlb_one(asid, page); in local_flush_tlb_page()
56 unsigned long asid; in local_flush_tlb_range() local
59 asid = cpu_asid(cpu, mm); in local_flush_tlb_range()
65 set_asid(asid); in local_flush_tlb_range()
68 local_flush_tlb_one(asid, start); in local_flush_tlb_range()
89 unsigned long asid; in local_flush_tlb_kernel_range() local
92 asid = cpu_asid(cpu, &init_mm); in local_flush_tlb_kernel_range()
[all …]
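local_flush_tlb_page() and friends temporarily borrow the target mm's ASID: the single-entry flush only matches entries tagged with the ASID currently loaded in the MMU, so the code saves the live ASID, installs the victim's, flushes, and restores. A sketch of that control flow; get_asid(), set_asid(), and flush_one() here are hypothetical stand-ins for the SH MMU accessors:

    static unsigned long current_hw_asid;
    static unsigned long get_asid(void) { return current_hw_asid; }
    static void set_asid(unsigned long asid) { current_hw_asid = asid; }
    static void flush_one(unsigned long asid, unsigned long page)
    {
        (void)asid; (void)page;   /* single-entry MMU flush elided */
    }

    #define NO_ASID (~0ul)

    /* Flush one page of another address space: borrow its ASID so the
     * single-entry flush matches, then put our own back. */
    static void flush_tlb_page_sketch(unsigned long target_asid,
                                      unsigned long page, int foreign_mm)
    {
        unsigned long saved_asid = NO_ASID;

        if (foreign_mm) {                 /* not the current mm */
            saved_asid = get_asid();
            set_asid(target_asid);
        }
        flush_one(target_asid, page);
        if (saved_asid != NO_ASID)
            set_asid(saved_asid);
    }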
tlb-sh3.c:75 void local_flush_tlb_one(unsigned long asid, unsigned long page) in local_flush_tlb_one() argument
87 data = (page & 0xfffe0000) | asid; /* VALID bit is off */ in local_flush_tlb_one()
tlb-sh4.c:82 void __uses_jump_to_uncached local_flush_tlb_one(unsigned long asid, in local_flush_tlb_one() argument
94 data = page | asid; /* VALID bit is off */ in local_flush_tlb_one()
tlb-sh5.c:122 unsigned long asid, in sh64_setup_tlb_slot() argument
134 pteh |= (asid << PTEH_ASID_SHIFT) | PTEH_VALID; in sh64_setup_tlb_slot()
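The SH-3/SH-4 hits compose the word written to the TLB address array by OR-ing the ASID into the low bits of the masked virtual page number, leaving the VALID bit clear so the write invalidates a matching entry rather than installing one. A small worked example using the mask quoted in the tlb-sh3.c excerpt above:

    #include <stdio.h>

    /* Address-array word: VPN in the high bits, 8-bit ASID in the low
     * byte, VALID bit left clear so the write drops a matching entry. */
    int main(void)
    {
        unsigned long page = 0x7f123000ul;   /* faulting virtual address */
        unsigned long asid = 0x2a;           /* address-space ID */
        unsigned long data = (page & 0xfffe0000ul) | asid;

        printf("address-array data = %#lx\n", data);  /* 0x7f12002a */
        return 0;
    }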
/arch/sh/include/asm/
mmu_context_32.h:13 static inline void set_asid(unsigned long asid) in set_asid() argument
22 : "r" (asid), "m" (__m(MMU_PTEH)), in set_asid()
28 unsigned long asid; in get_asid() local
31 : "=r" (asid) in get_asid()
33 asid &= MMU_CONTEXT_ASID_MASK; in get_asid()
34 return asid; in get_asid()
mmu_context.h:54 unsigned long asid = asid_cache(cpu); in get_mmu_context() local
57 if (((cpu_context(cpu, mm) ^ asid) & MMU_CONTEXT_VERSION_MASK) == 0) in get_mmu_context()
62 if (!(++asid & MMU_CONTEXT_ASID_MASK)) { in get_mmu_context()
81 if (!asid) in get_mmu_context()
82 asid = MMU_CONTEXT_FIRST_VERSION; in get_mmu_context()
85 cpu_context(cpu, mm) = asid_cache(cpu) = asid; in get_mmu_context()
131 #define set_asid(asid) do { } while (0) argument
134 #define switch_and_save_asid(asid) (0) argument
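get_mmu_context()'s fast path is the XOR trick on line 57: XOR-ing the mm's cached context with the per-CPU counter and masking the version bits answers "same generation?" in one compare. A condensed, single-CPU sketch with hypothetical masks and the TLB flush omitted:

    /* Hypothetical split mirroring the SH layout: low byte = ASID,
     * everything above = generation ("version"). */
    #define CTX_ASID_MASK     0xfful
    #define CTX_VERSION_MASK  (~CTX_ASID_MASK)
    #define CTX_FIRST_VERSION 0x100ul

    static unsigned long asid_cache = CTX_FIRST_VERSION;

    static unsigned long get_context(unsigned long mm_ctx)
    {
        unsigned long asid = asid_cache;

        /* Fast path: version bits agree, the mm's ASID is still current. */
        if (((mm_ctx ^ asid) & CTX_VERSION_MASK) == 0)
            return mm_ctx;

        /* Slow path: hand out the next ASID; on low-byte wrap a real
         * kernel flushes the TLB, and the version advances by itself. */
        if (!(++asid & CTX_ASID_MASK)) {
            if (!asid)                    /* whole counter wrapped */
                asid = CTX_FIRST_VERSION;
        }
        return asid_cache = asid;
    }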
tlbflush.h:22 extern void local_flush_tlb_one(unsigned long asid, unsigned long page);
32 extern void flush_tlb_one(unsigned long asid, unsigned long page);
39 #define flush_tlb_one(asid, page) local_flush_tlb_one(asid, page) argument
mmu_context_64.h:42 static inline void set_asid(unsigned long asid) in set_asid() argument
48 sr = (sr & SR_ASID_MASK) | (asid << SR_ASID_SHIFT); in set_asid()
tlb_64.h:60 unsigned long asid, unsigned long paddr);
67 #define sh64_setup_tlb_slot(conf, virt, asid, phys) do { } while (0) argument
/arch/mips/lib/
r3k_dump_tlb.c:21 unsigned int asid; in dump_tlb() local
24 asid = read_c0_entryhi() & 0xfc0; in dump_tlb()
38 && (entryhi & 0xfc0) == asid) { in dump_tlb()
57 write_c0_entryhi(asid); in dump_tlb()
dump_tlb.c:42 unsigned long s_entryhi, entryhi, asid; in dump_tlb() local
48 asid = s_entryhi & 0xff; in dump_tlb()
62 && (entryhi & 0xff) == asid) { in dump_tlb()
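Both dump routines read the live ASID out of c0_entryhi before walking the TLB and write it back afterwards, since probing rewrites EntryHi; note the mask difference, 0xfc0 (ASID in bits 6..11) on the R3000 versus 0xff on later cores. A sketch with hypothetical stand-ins for the mfc0/mtc0 accessors:

    static unsigned long entryhi;
    static unsigned long read_entryhi(void) { return entryhi; }
    static void write_entryhi(unsigned long v) { entryhi = v; }

    #define R3K_ASID_MASK 0xfc0ul   /* R3000: ASID field is bits 6..11 */

    static void dump_tlb_sketch(void)
    {
        unsigned long asid = read_entryhi() & R3K_ASID_MASK;

        /* ... probe every TLB slot (which clobbers EntryHi) and print
         * the entries whose ASID field matches 'asid' ... */

        write_entryhi(asid);   /* restore the ASID the probe loop trashed */
    }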
/arch/avr32/mm/
tlb.c:117 static void __flush_tlb_page(unsigned long asid, unsigned long page) in __flush_tlb_page() argument
126 tlbehi = page | asid; in __flush_tlb_page()
153 unsigned long flags, asid; in flush_tlb_page() local
156 asid = vma->vm_mm->context & MMU_CONTEXT_ASID_MASK; in flush_tlb_page()
162 set_asid(asid); in flush_tlb_page()
165 __flush_tlb_page(asid, page); in flush_tlb_page()
190 unsigned long asid; in flush_tlb_range() local
193 asid = mm->context & MMU_CONTEXT_ASID_MASK; in flush_tlb_range()
202 set_asid(asid); in flush_tlb_range()
206 __flush_tlb_page(asid, start); in flush_tlb_range()
[all …]
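The AVR32 range flush resolves the mm's ASID once, switches the hardware ASID to it (restoring afterwards, as in the SH example earlier), and invalidates page by page with TLBEHI = page | asid. A sketch of the loop, with hypothetical page and ASID masks and the MMU register write elided:

    #define SK_PAGE_SIZE 4096ul
    #define SK_PAGE_MASK (~(SK_PAGE_SIZE - 1))
    #define SK_ASID_MASK 0xfful

    /* Hypothetical single-page invalidate: AVR32 writes (page | asid)
     * into TLBEHI and runs the search/write sequence; elided here. */
    static void flush_one_page(unsigned long asid, unsigned long page)
    {
        unsigned long tlbehi = page | asid;
        (void)tlbehi;
    }

    /* Flush [start, end): resolve the mm's ASID once, then invalidate
     * page by page (real code falls back to a full flush for big ranges). */
    static void flush_range(unsigned long mm_context,
                            unsigned long start, unsigned long end)
    {
        unsigned long asid = mm_context & SK_ASID_MASK;
        unsigned long page;

        for (page = start & SK_PAGE_MASK; page < end; page += SK_PAGE_SIZE)
            flush_one_page(asid, page);
    }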
/arch/avr32/include/asm/
mmu_context.h:87 static inline void set_asid(unsigned long asid) in set_asid() argument
90 sysreg_write(TLBEHI, asid & MMU_CONTEXT_ASID_MASK); in set_asid()
96 unsigned long asid; in get_asid() local
98 asid = sysreg_read(TLBEHI); in get_asid()
99 return asid & MMU_CONTEXT_ASID_MASK; in get_asid()
/arch/sh/include/cpu-sh4/cpu/
ubc.h:38 #define UBC_CBR_AIV_SET(asid) (((asid)<<UBC_CBR_AIV_SHIFT) & UBC_CBR_AIV_MASK) argument
/arch/mips/dec/
kn01-berr.c:86 long asid, entryhi, vaddr; in dec_kn01_be_backend() local
116 asid = read_c0_entryhi(); in dec_kn01_be_backend()
117 entryhi = asid & (PAGE_SIZE - 1); in dec_kn01_be_backend()
125 write_c0_entryhi(asid); in dec_kn01_be_backend()
/arch/mips/include/asm/
mmu_context.h:96 unsigned long asid = asid_cache(cpu); in get_new_mmu_context() local
98 if (! ((asid += ASID_INC) & ASID_MASK) ) { in get_new_mmu_context()
102 if (!asid) /* fix version if needed */ in get_new_mmu_context()
103 asid = ASID_FIRST_VERSION; in get_new_mmu_context()
105 cpu_context(cpu, mm) = asid_cache(cpu) = asid; in get_new_mmu_context()
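get_new_mmu_context() is the same generational allocator as on ARM, except that it advances by ASID_INC rather than 1: on cores like the R3000 the ASID field sits at bits 6..11 of EntryHi (the 0xfc0 mask seen in r3k_dump_tlb.c), so consecutive ASIDs differ by 0x40. A sketch assuming that layout:

    /* Hypothetical R3000-style layout: ASID in bits 6..11 of EntryHi,
     * so consecutive ASIDs differ by 0x40 and the version sits above. */
    #define SK_ASID_INC           0x40ul
    #define SK_ASID_MASK          0xfc0ul
    #define SK_ASID_FIRST_VERSION 0x1000ul

    static unsigned long asid_cache_sk = SK_ASID_FIRST_VERSION;

    static unsigned long get_new_context(void)
    {
        unsigned long asid = asid_cache_sk;

        if (!((asid += SK_ASID_INC) & SK_ASID_MASK)) {  /* field wrapped */
            /* local_flush_tlb_all() would start a new ASID cycle here */
            if (!asid)                    /* whole counter overflowed */
                asid = SK_ASID_FIRST_VERSION;
        }
        return asid_cache_sk = asid;
    }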
smtc.h:43 void smtc_flush_tlb_asid(unsigned long asid);
/arch/sh/lib64/
dbg.c:79 unsigned int valid, shared, asid, epn, cb, ppn; in print_single_tlb() local
99 asid = GET_ASID(pteH); in print_single_tlb()
107 ppn, epn, asid, shared, cb, pSize, pProt); in print_single_tlb()
393 lookup_mmu_side(unsigned long base, unsigned long Eaddr, unsigned long asid) in lookup_mmu_side() argument
405 if ((unsigned long) GET_ASID(pteH) == asid) in lookup_mmu_side()
413 unsigned long asid = get_asid(); in lookup_dtlb() local
414 return (lookup_mmu_side((u64) DTLB_BASE, Eaddr, asid)); in lookup_dtlb()
419 unsigned long asid = get_asid(); in lookup_itlb() local
420 return (lookup_mmu_side((u64) ITLB_BASE, Eaddr, asid)); in lookup_itlb()
/arch/sh/kernel/
process_32.c:212 static void ubc_set_tracing(int asid, unsigned long pc) in ubc_set_tracing() argument
218 val |= (UBC_CBR_AIE | UBC_CBR_AIV_SET(asid)); in ubc_set_tracing()
235 ctrl_outb(asid, UBC_BASRA); in ubc_set_tracing()
277 int asid = 0; in __switch_to() local
279 asid |= cpu_asid(smp_processor_id(), next->mm); in __switch_to()
281 ubc_set_tracing(asid, next->thread.ubc_pc); in __switch_to()
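Here the SH user-break controller qualifies a hardware breakpoint by ASID, so single-step tracing fires only in the traced address space; __switch_to() folds the next mm's ASID into the compare/break register via UBC_CBR_AIV_SET(). A sketch of the bitfield insertion, with hypothetical shift, mask, and enable-bit values (the real ones live in ubc.h):

    #include <stdio.h>

    /* Hypothetical UBC field layout: AIV = ASID value field,
     * AIE = "match on ASID" enable bit. */
    #define UBC_CBR_AIV_SHIFT 16
    #define UBC_CBR_AIV_MASK  (0xfful << UBC_CBR_AIV_SHIFT)
    #define UBC_CBR_AIE       (1ul << 30)
    #define UBC_CBR_AIV_SET(asid) \
        (((unsigned long)(asid) << UBC_CBR_AIV_SHIFT) & UBC_CBR_AIV_MASK)

    int main(void)
    {
        unsigned long cbr = 0;
        int asid = 0x2a;

        /* Qualify the break channel: fire only while the MMU's current
         * ASID matches, i.e. only in the traced process. */
        cbr |= UBC_CBR_AIE | UBC_CBR_AIV_SET(asid);
        printf("cbr = %#lx\n", cbr);   /* prints cbr = 0x402a0000 */
        return 0;
    }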
/arch/xtensa/include/asm/
mmu_context.h:108 unsigned long asid = asid_cache; in switch_mm() local
112 if (next->context == NO_CONTEXT || ((next->context^asid) & ~ASID_MASK)) in switch_mm()
/arch/arm/include/asm/
tlbflush.h:313 const int asid = ASID(mm); in local_flush_tlb_mm() local
331 asm("mcr p15, 0, %0, c8, c7, 2" : : "r" (asid) : "cc"); in local_flush_tlb_mm()
333 asm("mcr p15, 0, %0, c8, c6, 2" : : "r" (asid) : "cc"); in local_flush_tlb_mm()
335 asm("mcr p15, 0, %0, c8, c5, 2" : : "r" (asid) : "cc"); in local_flush_tlb_mm()
/arch/powerpc/kvm/
44x_tlb.c:272 void kvmppc_mmu_map(struct kvm_vcpu *vcpu, u64 gvaddr, gpa_t gpaddr, u64 asid, in kvmppc_mmu_map() argument
334 stlbe.tid = !(asid & 0xff); in kvmppc_mmu_map()
451 u64 asid; in kvmppc_44x_emul_tlbwe() local
465 asid = (tlbe->word0 & PPC44x_TLB_TS) | tlbe->tid; in kvmppc_44x_emul_tlbwe()
468 kvmppc_mmu_map(vcpu, eaddr, gpaddr, asid, flags, bytes, gtlb_index); in kvmppc_44x_emul_tlbwe()
/arch/mips/kernel/
smtc.c:1274 unsigned long flags, mtflags, tcstat, prevhalt, asid; in smtc_get_new_mmu_context() local
1296 asid = asid_cache(cpu); in smtc_get_new_mmu_context()
1299 if (!((asid += ASID_INC) & ASID_MASK) ) { in smtc_get_new_mmu_context()
1323 if (!asid) /* fix version if needed */ in smtc_get_new_mmu_context()
1324 asid = ASID_FIRST_VERSION; in smtc_get_new_mmu_context()
1327 } while (smtc_live_asid[tlb][(asid & ASID_MASK)]); in smtc_get_new_mmu_context()
1335 cpu_context(i, mm) = asid_cache(i) = asid; in smtc_get_new_mmu_context()
1351 void smtc_flush_tlb_asid(unsigned long asid) in smtc_flush_tlb_asid() argument
1365 if ((ehi & ASID_MASK) == asid) { in smtc_flush_tlb_asid()
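Under SMTC several thread contexts share one TLB, so a freshly allocated ASID may still be live elsewhere: the allocator keeps advancing until the live table shows the candidate free, and smtc_flush_tlb_asid() scrubs entries tagged with a retiring ASID. A sketch of the skip loop, assuming a hypothetical live_asid[] table and at least one free ASID:

    #define SK2_ASID_INC           1ul
    #define SK2_ASID_MASK          0xfful
    #define SK2_ASID_FIRST_VERSION 0x100ul
    #define SK2_NUM_ASIDS          (SK2_ASID_MASK + 1)

    /* Hypothetical table counting which hardware ASIDs are still
     * referenced by some thread context sharing this TLB. */
    static unsigned int live_asid[SK2_NUM_ASIDS];

    static unsigned long asid_cache_smtc = SK2_ASID_FIRST_VERSION;

    static unsigned long smtc_new_context(void)
    {
        unsigned long asid = asid_cache_smtc;

        /* Advance until the candidate's hardware ASID is unused by
         * every other thread context on this shared TLB. */
        do {
            if (!((asid += SK2_ASID_INC) & SK2_ASID_MASK)) {
                /* full flush on wrap, as in the non-SMTC version */
                if (!asid)
                    asid = SK2_ASID_FIRST_VERSION;
            }
        } while (live_asid[asid & SK2_ASID_MASK]);

        return asid_cache_smtc = asid;
    }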
/arch/m32r/mm/
fault.c:494 unsigned long asid; in local_flush_tlb_range() local
496 asid = mm_context(mm) & MMU_CONTEXT_ASID_MASK; in local_flush_tlb_range()
501 start |= asid; in local_flush_tlb_range()
502 end |= asid; in local_flush_tlb_range()
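m32r folds the ASID into the low (in-page) bits of the page-aligned start and end addresses, so each per-page invalidate operand carries the virtual page number and the ASID in a single word. A minimal sketch of building such an operand, with hypothetical masks:

    #define SK3_PAGE_MASK 0xfffff000ul   /* 4 KiB pages, hypothetical */
    #define SK3_ASID_MASK 0xfful

    /* One word for the per-page invalidate: VPN in the high bits,
     * ASID tucked into the otherwise-unused offset bits. */
    static unsigned long flush_operand(unsigned long va, unsigned long mm_ctx)
    {
        unsigned long asid = mm_ctx & SK3_ASID_MASK;
        return (va & SK3_PAGE_MASK) | asid;
    }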
