
Searched refs:eaddr (Results 1 – 14 of 14) sorted by relevance

/arch/blackfin/kernel/cplb-nompu/
cplbinit.c
101 dcplb_bounds[i_d].eaddr = memory_mtd_start + mtd_size; in generate_cplb_tables_all()
103 dcplb_bounds[i_d].eaddr = memory_end; in generate_cplb_tables_all()
108 dcplb_bounds[i_d].eaddr = _ramend; in generate_cplb_tables_all()
113 dcplb_bounds[i_d].eaddr = physical_mem_end; in generate_cplb_tables_all()
118 dcplb_bounds[i_d].eaddr = ASYNC_BANK0_BASE; in generate_cplb_tables_all()
121 dcplb_bounds[i_d].eaddr = ASYNC_BANK3_BASE + ASYNC_BANK3_SIZE; in generate_cplb_tables_all()
124 dcplb_bounds[i_d].eaddr = BOOT_ROM_START; in generate_cplb_tables_all()
127 dcplb_bounds[i_d].eaddr = BOOT_ROM_START + (1 * 1024 * 1024); in generate_cplb_tables_all()
131 dcplb_bounds[i_d].eaddr = L2_START; in generate_cplb_tables_all()
134 dcplb_bounds[i_d].eaddr = L2_START + L2_LENGTH; in generate_cplb_tables_all()
[all …]
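
For orientation, the assignments above build the Blackfin data-CPLB boundary table in generate_cplb_tables_all(): each step fills an entry's attribute bits and then closes the region at the named end address. A minimal sketch of that construction; the addresses and attribute values below are hypothetical stand-ins for _ramend, ASYNC_BANK0_BASE, and friends, not the kernel's values:

    struct cplb_boundary { unsigned long eaddr; unsigned long data; };

    #define RAM_END     0x08000000UL   /* hypothetical stand-in for _ramend */
    #define ASYNC0_BASE 0x20000000UL   /* hypothetical ASYNC_BANK0_BASE */
    #define ASYNC3_END  0x30000000UL   /* hypothetical ASYNC_BANK3_BASE + ASYNC_BANK3_SIZE */

    static struct cplb_boundary dcplb_bounds[8];

    static int generate_bounds(void)
    {
        int i_d = 0;

        /* SDRAM: covered up to the end of external RAM. */
        dcplb_bounds[i_d].data    = 0 /* SDRAM CPLB attribute bits */;
        dcplb_bounds[i_d++].eaddr = RAM_END;

        /* Gap between RAM and the async banks: no coverage. */
        dcplb_bounds[i_d].data    = 0;
        dcplb_bounds[i_d++].eaddr = ASYNC0_BASE;

        /* Async memory banks: uncached attributes. */
        dcplb_bounds[i_d].data    = 0 /* async-bank attribute bits */;
        dcplb_bounds[i_d++].eaddr = ASYNC3_END;

        return i_d;   /* number of boundaries in use */
    }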
cplbmgr.c
159 unsigned long i_data, base, addr1, eaddr; in icplb_miss() local
168 eaddr = icplb_bounds[idx].eaddr; in icplb_miss()
169 if (addr < eaddr) in icplb_miss()
171 base = eaddr; in icplb_miss()
184 if (addr1 >= base && (addr1 + SIZE_4M) <= eaddr) { in icplb_miss()
206 unsigned long d_data, base, addr1, eaddr; in dcplb_miss() local
215 eaddr = dcplb_bounds[idx].eaddr; in dcplb_miss()
216 if (addr < eaddr) in dcplb_miss()
218 base = eaddr; in dcplb_miss()
231 if (addr1 >= base && (addr1 + SIZE_4M) <= eaddr) { in dcplb_miss()
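
The icplb_miss()/dcplb_miss() hits above share one idiom: walk the boundary table until the first entry whose eaddr exceeds the faulting address, using the previous entry's eaddr as the region base. A condensed sketch of that lookup, with simplified types and a hypothetical dcplb_nr_bounds count:

    struct cplb_boundary {
        unsigned long eaddr;   /* end of this region (exclusive) */
        unsigned long data;    /* CPLB data bits for the region */
    };

    /* Hypothetical table; the kernel fills these in generate_cplb_tables_all(). */
    static struct cplb_boundary dcplb_bounds[16];
    static int dcplb_nr_bounds;

    /* Regions are contiguous and sorted by eaddr, so the first entry whose
     * eaddr exceeds addr is the match; base tracks the start of the current
     * region (the previous entry's eaddr). */
    static unsigned long lookup_region(unsigned long addr, unsigned long *base_out)
    {
        unsigned long base = 0;
        int idx;

        for (idx = 0; idx < dcplb_nr_bounds; idx++) {
            unsigned long eaddr = dcplb_bounds[idx].eaddr;
            if (addr < eaddr) {
                *base_out = base;
                return dcplb_bounds[idx].data;
            }
            base = eaddr;
        }
        *base_out = base;
        return 0;   /* address outside all regions */
    }

In the real handlers, a follow-up check (addr1 >= base && addr1 + SIZE_4M <= eaddr, visible above) upgrades the new CPLB entry to a 4 MB page when the aligned address fits entirely inside the region.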
/arch/sh/mm/
cache-sh5.c
34 #define sh64_dcache_purge_coloured_phy_page(paddr, eaddr) do { } while (0) argument
37 #define sh64_dcache_purge_virt_page(mm, eaddr) do { } while (0) argument
46 sh64_setup_dtlb_cache_slot(unsigned long eaddr, unsigned long asid, in sh64_setup_dtlb_cache_slot() argument
50 sh64_setup_tlb_slot(dtlb_cache_slot, eaddr, asid, paddr); in sh64_setup_dtlb_cache_slot()
100 static void sh64_icache_inv_user_page(struct vm_area_struct *vma, unsigned long eaddr) in sh64_icache_inv_user_page() argument
108 addr = eaddr; in sh64_icache_inv_user_page()
172 unsigned long eaddr; in sh64_icache_inv_user_page_range() local
201 eaddr = aligned_start; in sh64_icache_inv_user_page_range()
202 while (eaddr < vma_end) { in sh64_icache_inv_user_page_range()
203 sh64_icache_inv_user_page(vma, eaddr); in sh64_icache_inv_user_page_range()
[all …]
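
The range variant simply steps an effective address across the VMA one page at a time, invalidating each page's icache lines. A rough sketch of that loop, assuming a 4 KiB page and a stand-in for sh64_icache_inv_user_page() (the real helper also takes the vma):

    #define PAGE_SIZE 4096UL

    static void inv_user_page(unsigned long eaddr)
    {
        /* invalidate the icache lines covering [eaddr, eaddr + PAGE_SIZE) */
    }

    static void inv_user_page_range(unsigned long aligned_start,
                                    unsigned long vma_end)
    {
        unsigned long eaddr = aligned_start;

        while (eaddr < vma_end) {
            inv_user_page(eaddr);
            eaddr += PAGE_SIZE;   /* next page's effective address */
        }
    }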
tlb-sh5.c
121 unsigned long eaddr, in sh64_setup_tlb_slot() argument
129 pteh = (unsigned long long)(signed long long)(signed long) eaddr; in sh64_setup_tlb_slot()
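
The double cast on line 129 is a sign-extension idiom: on a build where long is 32 bits, it widens eaddr to 64 bits with the upper half copied from bit 31, which is the form the SH-5 PTEH register field expects. A standalone illustration with a hypothetical address:

    #include <stdio.h>

    int main(void)
    {
        unsigned long eaddr = 0x80001000UL;   /* hypothetical kernel address */
        unsigned long long pteh =
            (unsigned long long)(signed long long)(signed long)eaddr;

        /* With 32-bit long this prints 0xffffffff80001000: bit 31 of eaddr
         * is replicated into the upper 32 bits.  With 64-bit long the casts
         * are a no-op and the value passes through unchanged. */
        printf("pteh = 0x%llx\n", pteh);
        return 0;
    }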
/arch/powerpc/kvm/
44x_tlb.h
26 extern int kvmppc_44x_tlb_index(struct kvm_vcpu *vcpu, gva_t eaddr,
28 extern int kvmppc_44x_dtlb_index(struct kvm_vcpu *vcpu, gva_t eaddr);
29 extern int kvmppc_44x_itlb_index(struct kvm_vcpu *vcpu, gva_t eaddr);
88 static inline gpa_t tlb_xlate(struct kvmppc_44x_tlbe *tlbe, gva_t eaddr) in tlb_xlate() argument
92 return get_tlb_raddr(tlbe) | (eaddr & pgmask); in tlb_xlate()
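
tlb_xlate() splices a guest physical address together: frame bits from the TLB entry's real address, offset bits from eaddr. A simplified sketch; the real code derives pgmask from the entry's page size, while here it is fixed at 4 KiB for illustration:

    typedef unsigned long long gpa_t;
    typedef unsigned long gva_t;

    struct tlbe {
        gpa_t raddr;   /* real (guest-physical) frame address */
    };

    static inline gpa_t tlb_xlate(struct tlbe *tlbe, gva_t eaddr)
    {
        gpa_t pgmask = 0xfffULL;   /* hypothetical: fixed 4 KiB pages */

        /* frame bits from the TLB entry, page-offset bits from eaddr */
        return (tlbe->raddr & ~pgmask) | (eaddr & pgmask);
    }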
44x_tlb.c
178 int kvmppc_44x_tlb_index(struct kvm_vcpu *vcpu, gva_t eaddr, unsigned int pid, in kvmppc_44x_tlb_index() argument
189 if (eaddr < get_tlb_eaddr(tlbe)) in kvmppc_44x_tlb_index()
192 if (eaddr > get_tlb_end(tlbe)) in kvmppc_44x_tlb_index()
211 int kvmppc_44x_itlb_index(struct kvm_vcpu *vcpu, gva_t eaddr) in kvmppc_44x_itlb_index() argument
215 return kvmppc_44x_tlb_index(vcpu, eaddr, vcpu->arch.pid, as); in kvmppc_44x_itlb_index()
218 int kvmppc_44x_dtlb_index(struct kvm_vcpu *vcpu, gva_t eaddr) in kvmppc_44x_dtlb_index() argument
222 return kvmppc_44x_tlb_index(vcpu, eaddr, vcpu->arch.pid, as); in kvmppc_44x_dtlb_index()
452 gva_t eaddr; in kvmppc_44x_emul_tlbwe() local
457 eaddr = get_tlb_eaddr(tlbe); in kvmppc_44x_emul_tlbwe()
462 eaddr &= ~(bytes - 1); in kvmppc_44x_emul_tlbwe()
[all …]
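
kvmppc_44x_tlb_index() is a linear scan of the guest TLB, matching when eaddr lies within an entry's [start, end] range (the PID and address-space checks are elided here). A condensed sketch with hypothetical field names:

    struct tlbe_range {
        unsigned long eaddr;   /* start of the mapped range */
        unsigned long end;     /* last byte of the mapped range */
        /* pid, address space, valid bit, etc. omitted */
    };

    /* Returns the index of the first entry covering eaddr, or -1. */
    static int tlb_index(struct tlbe_range *tlb, int nentries,
                         unsigned long eaddr)
    {
        int i;

        for (i = 0; i < nentries; i++) {
            if (eaddr < tlb[i].eaddr)
                continue;   /* below this entry's range */
            if (eaddr > tlb[i].end)
                continue;   /* above this entry's range */
            return i;
        }
        return -1;
    }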
44x.c
155 gva_t eaddr; in kvmppc_core_vcpu_translate() local
159 eaddr = tr->linear_address; in kvmppc_core_vcpu_translate()
163 index = kvmppc_44x_tlb_index(vcpu, eaddr, pid, as); in kvmppc_core_vcpu_translate()
171 tr->physical_address = tlb_xlate(gtlbe, eaddr); in kvmppc_core_vcpu_translate()
booke.c
291 unsigned long eaddr = vcpu->arch.fault_dear; in kvmppc_handle_exit() local
296 gtlb_index = kvmppc_44x_dtlb_index(vcpu, eaddr); in kvmppc_handle_exit()
308 vcpu->arch.paddr_accessed = tlb_xlate(gtlbe, eaddr); in kvmppc_handle_exit()
318 kvmppc_mmu_map(vcpu, eaddr, vcpu->arch.paddr_accessed, gtlbe->tid, in kvmppc_handle_exit()
336 unsigned long eaddr = vcpu->arch.pc; in kvmppc_handle_exit() local
344 gtlb_index = kvmppc_44x_itlb_index(vcpu, eaddr); in kvmppc_handle_exit()
355 gpaddr = tlb_xlate(gtlbe, eaddr); in kvmppc_handle_exit()
365 kvmppc_mmu_map(vcpu, eaddr, gpaddr, gtlbe->tid, in kvmppc_handle_exit()
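
Both exit paths above follow the same shape: look the faulting eaddr up in the guest TLB, and either reflect the miss to the guest or translate and install a shadow mapping. A heavily simplified sketch of the data-side path; every name below is a stand-in, not the kernel's:

    struct vcpu { unsigned long fault_dear, paddr_accessed; };

    static int dtlb_lookup(struct vcpu *v, unsigned long ea) { return -1; }  /* stub */
    static unsigned long xlate(int idx, unsigned long ea)    { return ea; }  /* stub */
    static void shadow_map(struct vcpu *v, unsigned long ea,
                           unsigned long pa)                 {}              /* stub */
    static void reflect_dtlb_miss_to_guest(struct vcpu *v)   {}              /* stub */

    static void handle_dtlb_miss(struct vcpu *vcpu)
    {
        unsigned long eaddr = vcpu->fault_dear;   /* faulting effective address */
        int gtlb_index = dtlb_lookup(vcpu, eaddr);

        if (gtlb_index < 0) {
            reflect_dtlb_miss_to_guest(vcpu);     /* guest's handler takes over */
            return;
        }

        vcpu->paddr_accessed = xlate(gtlb_index, eaddr);
        shadow_map(vcpu, eaddr, vcpu->paddr_accessed);
    }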
/arch/arm/mm/
alignment.c
424 unsigned long eaddr, newaddr; in do_alignment_ldmstm() local
438 newaddr = eaddr = regs->uregs[rn]; in do_alignment_ldmstm()
444 eaddr = newaddr; in do_alignment_ldmstm()
447 eaddr += 4; in do_alignment_ldmstm()
461 if (addr != eaddr) { in do_alignment_ldmstm()
464 instruction_pointer(regs), instr, addr, eaddr); in do_alignment_ldmstm()
475 get32t_unaligned_check(val, eaddr); in do_alignment_ldmstm()
478 put32t_unaligned_check(regs->uregs[rd], eaddr); in do_alignment_ldmstm()
479 eaddr += 4; in do_alignment_ldmstm()
487 get32_unaligned_check(val, eaddr); in do_alignment_ldmstm()
[all …]
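
do_alignment_ldmstm() emulates a misaligned LDM/STM one word at a time: eaddr starts at the base register's value and advances by 4 for each register in the transfer list. A skeletal sketch with hypothetical unaligned-access helpers (writeback and user-mode handling omitted):

    #include <stdint.h>

    static uint32_t get32_unaligned(unsigned long addr)        { return 0; }  /* stub */
    static void put32_unaligned(uint32_t v, unsigned long addr) {}            /* stub */

    /* regbits is the instruction's register list, regs the saved register
     * file, base the value of the base register rn. */
    static void emulate_ldmstm(unsigned long *regs, uint16_t regbits,
                               unsigned long base, int is_load)
    {
        unsigned long eaddr = base;
        int rd;

        for (rd = 0; rd < 16; rd++) {
            if (!(regbits & (1u << rd)))
                continue;   /* register not in the transfer list */
            if (is_load)
                regs[rd] = get32_unaligned(eaddr);
            else
                put32_unaligned((uint32_t)regs[rd], eaddr);
            eaddr += 4;     /* next word of the transfer */
        }
    }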
/arch/blackfin/include/asm/
cplbinit.h
48 unsigned long eaddr; /* End of this region. */ member
/arch/sh/include/asm/
tlb_64.h
59 void sh64_setup_tlb_slot(unsigned long long config_addr, unsigned long eaddr,
/arch/mips/include/asm/
sgialib.h
109 extern long prom_load(char *name, unsigned long end, unsigned long *pc, unsigned long *eaddr);
/arch/ia64/mm/
init.c
156 unsigned long addr, eaddr; in free_initmem() local
159 eaddr = (unsigned long) ia64_imva(__init_end); in free_initmem()
160 while (addr < eaddr) { in free_initmem()
/arch/mips/txx9/generic/
setup.c
411 unsigned long eaddr = __pa_symbol(&_text); in prom_free_prom_memory() local
413 if (saddr < eaddr) in prom_free_prom_memory()
414 free_init_pages("prom memory", saddr, eaddr); in prom_free_prom_memory()
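
The last two hits (ia64 free_initmem() and TXx9 prom_free_prom_memory()) share a shape: compute start and end addresses around a region, then release every page in [saddr, eaddr). A generic sketch; free_one_page() is a hypothetical stand-in for the kernel's page-freeing machinery, and the guard mirrors the saddr < eaddr check above:

    #define PAGE_SIZE 4096UL

    static void free_one_page(unsigned long addr)
    {
        /* hand the page at addr back to the allocator */
    }

    static void free_init_range(unsigned long saddr, unsigned long eaddr)
    {
        unsigned long addr;

        if (saddr >= eaddr)
            return;   /* nothing to free */

        for (addr = saddr; addr < eaddr; addr += PAGE_SIZE)
            free_one_page(addr);
    }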