/external/qemu/ |
D | softmmu_template.h |
     75  physaddr = (physaddr & TARGET_PAGE_MASK) + addr;  in glue()
    116  if ((addr & TARGET_PAGE_MASK) == (tlb_addr & (TARGET_PAGE_MASK | TLB_INVALID_MASK))) {  in glue()
    117  if (tlb_addr & ~TARGET_PAGE_MASK) {  in glue()
    124  } else if (((addr & ~TARGET_PAGE_MASK) + DATA_SIZE - 1) >= TARGET_PAGE_SIZE) {  in glue()
    168  env->tlb_table[mmu_idx][index].addr_read ^= TARGET_PAGE_MASK;  in glue()
    169  env->tlb_table[mmu_idx][index].addr_write ^= TARGET_PAGE_MASK;  in glue()
    172  env->tlb_table[mmu_idx][index + 1].addr_read ^= TARGET_PAGE_MASK;  in glue()
    173  env->tlb_table[mmu_idx][index + 1].addr_write ^= TARGET_PAGE_MASK;  in glue()
    204  if ((addr & TARGET_PAGE_MASK) == (tlb_addr & (TARGET_PAGE_MASK | TLB_INVALID_MASK))) {  in glue()
    205  if (tlb_addr & ~TARGET_PAGE_MASK) {  in glue()
    [all …]
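The compares at softmmu_template.h lines 116 and 204 are the softmmu TLB fast-path hit test: the page tag of the guest address must equal the tag stored in the TLB entry, and folding TLB_INVALID_MASK into the comparison mask forces a miss whenever the entry's invalid bit is set. A minimal standalone sketch of that test follows; the page size and the flag-bit value are illustrative, not necessarily what this tree builds with.

    #include <stdint.h>

    #define TARGET_PAGE_BITS 12                       /* illustrative, target-dependent */
    #define TARGET_PAGE_SIZE (1 << TARGET_PAGE_BITS)
    #define TARGET_PAGE_MASK ~(TARGET_PAGE_SIZE - 1)
    #define TLB_INVALID_MASK (1 << 3)                 /* illustrative flag bit below the page size */

    typedef uint32_t target_ulong;

    /* tlb_addr is the page-aligned virtual address stored in one direction
     * (read or write) of a TLB entry, with flag bits kept in its low bits. */
    static int tlb_fast_path_hit(target_ulong addr, target_ulong tlb_addr)
    {
        /* Hit only if the page tags match and the invalid bit is clear:
         * any set flag bit inside the comparison mask breaks the equality. */
        return (addr & TARGET_PAGE_MASK) ==
               (tlb_addr & (TARGET_PAGE_MASK | TLB_INVALID_MASK));
    }

On a hit, line 117's tlb_addr & ~TARGET_PAGE_MASK then asks whether any other sub-page flag bits are set, which is what routes MMIO and similar special cases off the fast path.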
|
D | exec.c |
    203  #define SUBPAGE_IDX(addr) ((addr) & ~TARGET_PAGE_MASK)
    280  page_set_flags(startaddr & TARGET_PAGE_MASK,  in page_init()
    321  page_set_flags(addr & TARGET_PAGE_MASK,  in page_find_alloc()
    682  address &= TARGET_PAGE_MASK;  in tb_invalidate_check()
    791  phys_pc = tb->page_addr[0] + (tb->pc & ~TARGET_PAGE_MASK);  in tb_phys_invalidate()
    888  tb_start = tb->pc & ~TARGET_PAGE_MASK;  in build_page_bitmap()
    894  tb_end = ((tb->pc + tb->size) & ~TARGET_PAGE_MASK);  in build_page_bitmap()
    933  virt_page2 = (pc + tb->size - 1) & TARGET_PAGE_MASK;  in tb_gen_code()
    935  if ((pc & TARGET_PAGE_MASK) != virt_page2) {  in tb_gen_code()
    985  tb_start = tb->page_addr[0] + (tb->pc & ~TARGET_PAGE_MASK);  in tb_invalidate_phys_page_range()
    [all …]
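Several of these exec.c hits deal with translation blocks (TBs) that straddle a page boundary: lines 933–935 compare the page tag of a TB's first and last guest byte, and a block that spans two pages has to be registered against both so that invalidating either page flushes it. A hedged sketch of just that check, reusing the illustrative page-size definitions from the sketch above:

    /* Returns the page tag of the second page touched by the block
     * [pc, pc + size), or (target_ulong)-1 if it fits in a single page.
     * Mirrors the virt_page2 computation quoted from tb_gen_code(). */
    static target_ulong tb_second_page(target_ulong pc, target_ulong size)
    {
        target_ulong virt_page2 = (pc + size - 1) & TARGET_PAGE_MASK;
        if ((pc & TARGET_PAGE_MASK) != virt_page2) {
            return virt_page2;        /* block spills into a second page */
        }
        return (target_ulong)-1;      /* single-page block */
    }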
|
D | softmmu_header.h |
     94  (addr & (TARGET_PAGE_MASK | (DATA_SIZE - 1))))) {  in glue()
    115  (addr & (TARGET_PAGE_MASK | (DATA_SIZE - 1))))) {  in glue()
    140  (addr & (TARGET_PAGE_MASK | (DATA_SIZE - 1))))) {  in glue()
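The inline fast path in softmmu_header.h ORs DATA_SIZE - 1 into the comparison mask, so a single compare checks both the page tag and that the access is naturally aligned for its size (and therefore cannot cross the page boundary); unaligned accesses simply fail the compare and take the slow path. The TCG backends at the bottom of this listing emit the same TARGET_PAGE_MASK | ((1 << s_bits) - 1) constant into generated code. A small sketch, again reusing the illustrative definitions above, with data_size standing in for DATA_SIZE:

    /* One compare covers two conditions: page-tag match and natural
     * alignment.  tlb_tag is the page-aligned address stored in the TLB
     * entry; data_size is the access width in bytes (1, 2, 4 or 8). */
    static int fast_path_ok(target_ulong addr, target_ulong tlb_tag,
                            unsigned data_size)
    {
        /* Any set bit among the low (data_size - 1) bits of addr, or any
         * differing page-tag bit, makes the equality fail. */
        return tlb_tag == (addr & (TARGET_PAGE_MASK | (data_size - 1)));
    }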
|
D | poison.h | 33 #pragma GCC poison TARGET_PAGE_MASK
|
D | exec-all.h |
    397  (addr & TARGET_PAGE_MASK))) {  in get_phys_addr_code()
    400  pd = env1->tlb_table[mmu_idx][page_index].addr_code & ~TARGET_PAGE_MASK;  in get_phys_addr_code()
|
D | kqemu.c |
    331  end = (start_addr + size + TARGET_PAGE_SIZE - 1) & TARGET_PAGE_MASK;  in kqemu_set_phys_mem()
    332  start_addr &= TARGET_PAGE_MASK;  in kqemu_set_phys_mem()
    335  kphys_mem->ram_addr = phys_offset & TARGET_PAGE_MASK;  in kqemu_set_phys_mem()
    336  io_index = phys_offset & ~TARGET_PAGE_MASK;  in kqemu_set_phys_mem()
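In this generation of QEMU a memory slot's phys_offset packs two fields into one word: the page-aligned RAM offset in the high bits and an I/O index plus flags in the sub-page bits, which is why kqemu.c can separate them with one mask each (lines 335–336) and why kvm-all.c and the HAX backends below use the same masks to pull out flags and to reject unaligned regions. A rough sketch of that split, reusing the illustrative TARGET_PAGE_MASK from the first sketch; the struct and field names here are purely illustrative:

    typedef uint64_t ram_addr_t;      /* width is illustrative */

    /* Hypothetical decomposition of a phys_offset value. */
    struct phys_slot {
        ram_addr_t ram_addr;   /* page-aligned offset into guest RAM     */
        ram_addr_t io_flags;   /* I/O index / flags in the sub-page bits */
    };

    static struct phys_slot split_phys_offset(ram_addr_t phys_offset)
    {
        struct phys_slot s;
        s.ram_addr = phys_offset & TARGET_PAGE_MASK;    /* cf. kqemu.c:335 */
        s.io_flags = phys_offset & ~TARGET_PAGE_MASK;   /* cf. kqemu.c:336 */
        return s;
    }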
|
D | arch_init.c |
    381  flags = addr & ~TARGET_PAGE_MASK;  in ram_load()
    382  addr &= TARGET_PAGE_MASK;  in ram_load()
|
D | cpu-all.h |
    728  #define TARGET_PAGE_MASK ~(TARGET_PAGE_SIZE - 1)  macro
    729  #define TARGET_PAGE_ALIGN(addr) (((addr) + TARGET_PAGE_SIZE - 1) & TARGET_PAGE_MASK)
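cpu-all.h is where the two macros are actually defined: TARGET_PAGE_MASK clears the sub-page bits, and TARGET_PAGE_ALIGN rounds an address up to the next page boundary. The open-coded form (kernel_high + ~TARGET_PAGE_MASK) & TARGET_PAGE_MASK in hw/mips_r4k.c further down is the same round-up, since ~TARGET_PAGE_MASK equals TARGET_PAGE_SIZE - 1. A quick self-contained worked example, assuming a 4 KB page size:

    #include <assert.h>
    #include <stdint.h>

    #define TARGET_PAGE_SIZE 4096                 /* assumed for this example */
    #define TARGET_PAGE_MASK ~(TARGET_PAGE_SIZE - 1)
    #define TARGET_PAGE_ALIGN(addr) (((addr) + TARGET_PAGE_SIZE - 1) & TARGET_PAGE_MASK)

    int main(void)
    {
        uint32_t addr = 0x12345;

        assert((addr & TARGET_PAGE_MASK) == 0x12000);    /* round down to page start   */
        assert(TARGET_PAGE_ALIGN(addr) == 0x13000);      /* round up to next boundary  */
        assert(TARGET_PAGE_ALIGN(0x13000u) == 0x13000);  /* already aligned: unchanged */

        /* Open-coded equivalent of TARGET_PAGE_ALIGN, as in mips_r4k.c. */
        assert(((addr + ~TARGET_PAGE_MASK) & TARGET_PAGE_MASK) == 0x13000);
        return 0;
    }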
|
D | kvm-all.c |
    675  ram_addr_t flags = phys_offset & ~TARGET_PAGE_MASK;  in kvm_set_phys_mem()
    679  if (start_addr & ~TARGET_PAGE_MASK) {  in kvm_set_phys_mem()
|
D | cpu-exec.c |
    140  phys_page1 = phys_pc & TARGET_PAGE_MASK;  in tb_find_slow()
    154  virt_page2 = (pc & TARGET_PAGE_MASK) +  in tb_find_slow()
|
/external/qemu/memcheck/ |
D | memcheck_util.c |
    189  if ((start & TARGET_PAGE_MASK) ==  in invalidate_tlb_cache()
    190  (tlb_addr & (TARGET_PAGE_MASK | TLB_INVALID_MASK))) {  in invalidate_tlb_cache()
    191  cpu_single_env->tlb_table[1][index].addr_write ^= TARGET_PAGE_MASK;  in invalidate_tlb_cache()
    194  if ((start & TARGET_PAGE_MASK) ==  in invalidate_tlb_cache()
    195  (tlb_addr & (TARGET_PAGE_MASK | TLB_INVALID_MASK))) {  in invalidate_tlb_cache()
    196  cpu_single_env->tlb_table[1][index].addr_read ^= TARGET_PAGE_MASK;  in invalidate_tlb_cache()
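Here the Android memcheck helper invalidates individual TLB slots by XOR-ing the stored tag with TARGET_PAGE_MASK, the same trick the generated softmmu code uses at softmmu_template.h lines 168–173 above: flipping the tag bits means later accesses to that page no longer match the fast-path compare and drop back into the slow path, where memcheck can intercept them. A condensed sketch of the pattern, reusing the illustrative definitions from the first sketch:

    /* Knock one direction of a TLB entry out of the fast path if it maps
     * the page containing 'start'.  tlb_addr points at the stored tag. */
    static void invalidate_slot(target_ulong start, target_ulong *tlb_addr)
    {
        if ((start & TARGET_PAGE_MASK) ==
            (*tlb_addr & (TARGET_PAGE_MASK | TLB_INVALID_MASK))) {
            /* Flip the tag bits: accesses to this page now miss the
             * fast path and go through the slow-path helper instead. */
            *tlb_addr ^= TARGET_PAGE_MASK;
        }
    }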
|
D | memcheck.c |
    267  const target_ulong end_page = (addr + buf_size - 1) & TARGET_PAGE_MASK;  in procdesc_contains_allocs()
    269  addr &= TARGET_PAGE_MASK;  in procdesc_contains_allocs()
|
/external/qemu/target-i386/ |
D | hax-darwin.c |
     61  ram_addr_t flags = phys_offset & ~TARGET_PAGE_MASK;  in hax_set_phys_mem()
     67  if ( (start_addr & ~TARGET_PAGE_MASK) || (size & ~TARGET_PAGE_MASK))  in hax_set_phys_mem()
|
D | hax-windows.c |
     99  ram_addr_t flags = phys_offset & ~TARGET_PAGE_MASK;  in hax_set_phys_mem()
    108  if ( (start_addr & ~TARGET_PAGE_MASK) || (size & ~TARGET_PAGE_MASK))  in hax_set_phys_mem()
|
D | helper.c |
    1000  virt_addr = addr & TARGET_PAGE_MASK;  in cpu_x86_handle_mmu_fault()
    1243  page_offset = (addr & TARGET_PAGE_MASK) & (page_size - 1);  in cpu_x86_handle_mmu_fault()
    1244  paddr = (pte & TARGET_PAGE_MASK) + page_offset;  in cpu_x86_handle_mmu_fault()
    1360  page_offset = (addr & TARGET_PAGE_MASK) & (page_size - 1);  in cpu_get_phys_page_debug()
    1361  paddr = (pte & TARGET_PAGE_MASK) + page_offset;  in cpu_get_phys_page_debug()
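The pair at lines 1243–1244 handles large (for example 4 MB) guest pages: QEMU still enters only one 4 KB page at a time into its TLB, so page_offset selects which 4 KB slice of the large page the address falls in, and that offset is added to the large page's physical base taken from the PDE. A self-contained worked example, assuming a 4 KB target page, a 4 MB guest page and made-up address values:

    #include <assert.h>
    #include <stdint.h>

    #define TARGET_PAGE_SIZE 4096
    #define TARGET_PAGE_MASK ~(TARGET_PAGE_SIZE - 1)

    int main(void)
    {
        uint32_t page_size = 4 * 1024 * 1024;   /* 4 MB guest page           */
        uint32_t addr      = 0x00405123;        /* faulting virtual address  */
        uint32_t pte       = 0x1c000000;        /* large-page physical base  */

        /* Which 4 KB slice of the 4 MB page does addr fall in? */
        uint32_t page_offset = (addr & TARGET_PAGE_MASK) & (page_size - 1);
        uint32_t paddr       = (pte & TARGET_PAGE_MASK) + page_offset;

        assert(page_offset == 0x00005000);
        assert(paddr == 0x1c005000);
        return 0;
    }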
|
/external/qemu/target-mips/ |
D | helper.c |
     74  target_ulong mask = tlb->PageMask | ~(TARGET_PAGE_MASK << 1);  in r4k_map_address()
    245  (env->CP0_EntryHi & 0xFF) | (address & (TARGET_PAGE_MASK << 1));  in raise_mmu_exception()
    300  ret = tlb_set_page(env, address & TARGET_PAGE_MASK,  in cpu_mips_handle_mmu_fault()
    301  physical & TARGET_PAGE_MASK, prot,  in cpu_mips_handle_mmu_fault()
    620  mask = tlb->PageMask | ~(TARGET_PAGE_MASK << 1);  in r4k_invalidate_tlb()
|
D | op_helper.c |
    1063  env->CP0_PageMask = arg1 & (0x1FFFFFFF & (TARGET_PAGE_MASK << 1));  in helper_mtc0_pagemask()
    1119  val = arg1 & ((TARGET_PAGE_MASK << 1) | 0xFF);  in helper_mtc0_entryhi()
    1523  tlb->VPN = env->CP0_EntryHi & (TARGET_PAGE_MASK << 1);  in r4k_fill_tlb()
    1576  mask = tlb->PageMask | ~(TARGET_PAGE_MASK << 1);  in r4k_helper_tlbp()
    1591  mask = tlb->PageMask | ~(TARGET_PAGE_MASK << 1);  in r4k_helper_tlbp()
    1879  if ((addr & TARGET_PAGE_MASK) == (tlb_addr & (TARGET_PAGE_MASK | TLB_INVALID_MASK))) {  in v2p_mmu()
    1907  (addr & TARGET_PAGE_MASK), 0)) {  in v2p()
    1925  if ((ptr & TARGET_PAGE_MASK) != page) {  in vstrcpy()
    1927  page = ptr & TARGET_PAGE_MASK;  in vstrcpy()
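The recurring TARGET_PAGE_MASK << 1 in the MIPS files reflects the R4000-style TLB, in which each entry maps an even/odd pair of pages: EntryHi's VPN2 field and an entry's compare mask therefore cover twice the page size (with the ASID in the low 8 bits, hence the | 0xFF at line 1119), and PageMask contributes extra don't-care bits for larger page sizes. A rough sketch of the tag match performed in r4k_map_address and r4k_helper_tlbp, reusing the illustrative definitions from the first sketch; a cast is added to keep the shift well defined in isolation:

    /* Minimal stand-in for one R4K TLB entry: VPN holds the pair-aligned
     * virtual page number, PageMask the extra don't-care bits for pages
     * larger than the base page size. */
    struct r4k_tlb_entry {
        target_ulong VPN;        /* masked to (TARGET_PAGE_MASK << 1) when filled */
        target_ulong PageMask;
    };

    static int r4k_tag_match(const struct r4k_tlb_entry *tlb, target_ulong address)
    {
        /* Bits to ignore: everything below the even/odd page pair, plus
         * whatever PageMask widens the entry to. */
        target_ulong mask = tlb->PageMask | ~((target_ulong)TARGET_PAGE_MASK << 1);
        return (address & ~mask) == (tlb->VPN & ~mask);
    }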
|
/external/qemu/hw/ |
D | mips_r4k.c | 109 initrd_offset = (kernel_high + ~TARGET_PAGE_MASK) & TARGET_PAGE_MASK; in load_kernel()
|
D | armv7m.c | 187 sram_size = (ram_size / 2) & TARGET_PAGE_MASK; in armv7m_init()
|
D | goldfish_pipe.c |
    1016  uint32_t page = address & TARGET_PAGE_MASK;  in pipeDevice_doCommand()
    1036  uint32_t page = address & TARGET_PAGE_MASK;  in pipeDevice_doCommand()
|
/external/qemu/target-arm/ |
D | translate-android.h | 157 const target_ulong phys_page1 = phys_pc & TARGET_PAGE_MASK; in register_ret_address()
|
/external/qemu/tcg/x86_64/ |
D | tcg-target.c |
    590  tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));  in tcg_out_qemu_ld()
    785  tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));  in tcg_out_qemu_st()
|
/external/qemu/tcg/sparc/ |
D | tcg-target.c |
    770  tcg_out_arithi(s, arg0, addr_reg, TARGET_PAGE_MASK | ((1 << s_bits) - 1),  in tcg_out_qemu_ld()
    982  tcg_out_arithi(s, arg0, addr_reg, TARGET_PAGE_MASK | ((1 << s_bits) - 1),  in tcg_out_qemu_st()
|
/external/qemu/tcg/hppa/ |
D | tcg-target.c | 939 tcg_out_andi(s, r0, addrlo, TARGET_PAGE_MASK | ((1 << s_bits) - 1)); in tcg_out_tlb_read()
|
/external/qemu/tcg/i386/ |
D | tcg-target.c | 1024 TARGET_PAGE_MASK | ((1 << s_bits) - 1), 0); in tcg_out_tlb_load()
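All of the TCG backends at the end of this listing bake the same constant into the generated load/store fast path: TARGET_PAGE_MASK | ((1 << s_bits) - 1), with s_bits the log2 of the access size, is the immediate the guest address is masked with before being compared against the TLB tag, i.e. the softmmu_header.h trick of folding the alignment check into the page compare, applied at code-generation time. As a worked example, assuming 32-bit guest addresses, 4 KB pages and a 4-byte access (s_bits = 2):

    TARGET_PAGE_MASK          = 0xfffff000   (4 KB pages, assumed)
    (1 << s_bits) - 1         = 0x00000003   (4-byte access)
    emitted comparison mask   = 0xfffff003

A guest address passes the AND-and-compare only if its page tag matches the TLB entry and its two low bits are zero; anything else branches to the slow-path helper.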
|