/arch/m68k/mm/sun3kmap.c

     29  static inline void do_page_mapin(unsigned long phys, unsigned long virt,    in do_page_mapin() argument
     39  sun3_put_pte(virt, pte);    in do_page_mapin()
     43  print_pte_vaddr(virt);    in do_page_mapin()
     48  static inline void do_pmeg_mapin(unsigned long phys, unsigned long virt,    in do_pmeg_mapin() argument
     52  if(sun3_get_segmap(virt & ~SUN3_PMEG_MASK) == SUN3_INVALID_PMEG)    in do_pmeg_mapin()
     53  mmu_emu_map_pmeg(sun3_get_context(), virt);    in do_pmeg_mapin()
     56  do_page_mapin(phys, virt, type);    in do_pmeg_mapin()
     58  virt += PAGE_SIZE;    in do_pmeg_mapin()
     67  unsigned long offset, virt, ret;    in sun3_ioremap() local
     88  virt = (unsigned long)area->addr;    in sun3_ioremap()
    [all …]

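The two helpers above combine into a simple map-in loop: make sure the 128 KB Sun-3 segment (PMEG) covering the address is present, then install one PTE per page. A condensed sketch of do_pmeg_mapin() as visible in the matches; the helper names are the sun3 MMU-emulation ones shown above, and the page-count loop bound is an assumption:

    static void map_range(unsigned long phys, unsigned long virt,
                          unsigned long pages, int type)
    {
            while (pages--) {
                    /* allocate a PMEG for this segment on first touch */
                    if (sun3_get_segmap(virt & ~SUN3_PMEG_MASK) == SUN3_INVALID_PMEG)
                            mmu_emu_map_pmeg(sun3_get_context(), virt);
                    do_page_mapin(phys, virt, type);   /* write one PTE */
                    phys += PAGE_SIZE;
                    virt += PAGE_SIZE;
            }
    }
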
/arch/parisc/kernel/kexec.c

     81  void *virt = (void *)__fix_to_virt(FIX_TEXT_KEXEC);    in machine_kexec() local
     90  desc.addr = (long long)virt;    in machine_kexec()
     92  reloc = (void *)virt;    in machine_kexec()
     95  memcpy(virt, dereference_function_descriptor(relocate_new_kernel),    in machine_kexec()
     98  *(unsigned long *)(virt + kexec_cmdline_offset) = arch->cmdline;    in machine_kexec()
     99  *(unsigned long *)(virt + kexec_initrd_start_offset) = arch->initrd_start;    in machine_kexec()
    100  *(unsigned long *)(virt + kexec_initrd_end_offset) = arch->initrd_end;    in machine_kexec()
    101  *(unsigned long *)(virt + kexec_free_mem_offset) = PAGE0->mem_free;    in machine_kexec()

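The machine_kexec() matches show the usual parisc kexec hand-off: copy the position-independent relocate_new_kernel stub into the FIX_TEXT_KEXEC fixmap page, then patch parameter words at fixed offsets inside the copy. A minimal sketch of the same pattern; the offset constant and stub symbols here are hypothetical stand-ins for the kexec_*_offset symbols exported by the real assembly stub:

    #include <linux/string.h>

    #define RELOC_CMDLINE_OFFSET 0x40   /* assumed offset, for illustration */

    extern char reloc_stub_start[], reloc_stub_end[];

    static void install_reloc_stub(void *virt, unsigned long cmdline_pa)
    {
            /* copy the relocation code into the scratch page */
            memcpy(virt, reloc_stub_start, reloc_stub_end - reloc_stub_start);

            /* patch a parameter slot the stub reads after the MMU goes off */
            *(unsigned long *)((char *)virt + RELOC_CMDLINE_OFFSET) = cmdline_pa;
    }
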
/arch/arm/mach-hisi/platsmp.c

    108  void __iomem *virt;    in hix5hd2_set_scu_boot_addr() local
    110  virt = ioremap(start_addr, PAGE_SIZE);    in hix5hd2_set_scu_boot_addr()
    112  writel_relaxed(0xe51ff004, virt);  /* ldr pc, [pc, #-4] */    in hix5hd2_set_scu_boot_addr()
    113  writel_relaxed(jump_addr, virt + 4);  /* pc jump phy address */    in hix5hd2_set_scu_boot_addr()
    114  iounmap(virt);    in hix5hd2_set_scu_boot_addr()
    144  void __iomem *virt;    in hip01_set_boot_addr() local
    146  virt = phys_to_virt(start_addr);    in hip01_set_boot_addr()
    148  writel_relaxed(0xe51ff004, virt);    in hip01_set_boot_addr()
    149  writel_relaxed(jump_addr, virt + 4);    in hip01_set_boot_addr()

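Both functions above plant the same two-word boot stub for a secondary CPU. 0xe51ff004 encodes the ARM instruction ldr pc, [pc, #-4]: because pc reads as the instruction address + 8, pc - 4 points at the very next word, so the CPU loads its jump target from the word stored right behind the instruction. A condensed sketch of hix5hd2_set_scu_boot_addr():

    #include <linux/io.h>

    static void set_boot_stub(phys_addr_t stub_pa, u32 jump_pa)
    {
            void __iomem *virt = ioremap(stub_pa, PAGE_SIZE);

            if (!virt)
                    return;
            writel_relaxed(0xe51ff004, virt);     /* ldr pc, [pc, #-4] */
            writel_relaxed(jump_pa, virt + 4);    /* physical jump target */
            iounmap(virt);
    }

hip01_set_boot_addr() differs only in using phys_to_virt(), since its boot address lives in already-mapped RAM rather than device memory.
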
/arch/powerpc/mm/nohash/e500.c

     97  static void settlbcam(int index, unsigned long virt, phys_addr_t phys,    in settlbcam() argument
    111  TLBCAM[index].MAS2 = virt & PAGE_MASK;    in settlbcam()
    133  tlbcam_addrs[index].start = virt;    in settlbcam()
    134  tlbcam_addrs[index].limit = virt + size - 1;    in settlbcam()
    138  static unsigned long calc_cam_sz(unsigned long ram, unsigned long virt,    in calc_cam_sz() argument
    142  unsigned int align = __ffs(virt | phys);    in calc_cam_sz()
    163  static unsigned long map_mem_in_cams_addr(phys_addr_t phys, unsigned long virt,    in map_mem_in_cams_addr() argument
    181  cam_sz = calc_cam_sz(boundary, virt, phys);    in map_mem_in_cams_addr()
    183  settlbcam(i, virt, phys, cam_sz, pgprot_val(prot), 0);    in map_mem_in_cams_addr()
    187  virt += cam_sz;    in map_mem_in_cams_addr()
    [all …]

/arch/powerpc/mm/nohash/44x.c

     55  static void __init ppc44x_pin_tlb(unsigned int virt, unsigned int phys)    in ppc44x_pin_tlb() argument
     70  "r" (virt | PPC44x_TLB_VALID | PPC44x_TLB_256M),    in ppc44x_pin_tlb()
    125  static void __init ppc47x_pin_tlb(unsigned int virt, unsigned int phys)    in ppc47x_pin_tlb() argument
    141  virt, phys, bolted);    in ppc47x_pin_tlb()
    157  "r" (virt | PPC47x_TLB0_VALID | PPC47x_TLB0_256M),    in ppc47x_pin_tlb()

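The __ffs(virt | phys) line in calc_cam_sz() above is the key alignment step: a CAM (TLB) entry of size 2^n needs both its virtual and physical base aligned to 2^n, so the lowest set bit of virt | phys bounds the largest usable entry. For example, virt = 0xc0000000 and phys = 0x20000000 give virt | phys = 0xe0000000, whose lowest set bit is bit 29, allowing an entry of up to 512 MB. The bound in isolation:

    #include <linux/bitops.h>

    /* largest power-of-two entry size (as a shift) both bases allow */
    static unsigned int max_cam_shift(unsigned long virt, unsigned long phys)
    {
            return __ffs(virt | phys);    /* undefined if both are zero */
    }
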
/arch/x86/pci/mmconfig_64.c

     23  if (cfg && cfg->virt)    in pci_dev_base()
     24  return cfg->virt + (PCI_MMCFG_BUS_OFFSET(bus) | (devfn << 12));    in pci_dev_base()
    139  cfg->virt = mcfg_ioremap(cfg);    in pci_mmcfg_arch_map()
    140  if (!cfg->virt) {    in pci_mmcfg_arch_map()
    150  if (cfg && cfg->virt) {    in pci_mmcfg_arch_unmap()
    151  iounmap(cfg->virt + PCI_MMCFG_BUS_OFFSET(cfg->start_bus));    in pci_mmcfg_arch_unmap()
    152  cfg->virt = NULL;    in pci_mmcfg_arch_unmap()

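pci_dev_base() above does standard ECAM arithmetic: configuration space is laid out as one 4 KB window per function, indexed as bus << 20 | devfn << 12, with PCI_MMCFG_BUS_OFFSET(b) expanding to (b) << 20. For instance, bus 3, device 2, function 1 has devfn = (2 << 3) | 1 = 0x11, so its window starts at offset 0x311000 from cfg->virt. A sketch of the address computation (ecam_addr() is an illustrative name, not a kernel helper):

    #include <linux/io.h>

    static void __iomem *ecam_addr(void __iomem *virt, unsigned int bus,
                                   unsigned int devfn, unsigned int reg)
    {
            /* 1 MB per bus, 4 KB per function, register offset within */
            return virt + ((bus << 20) | (devfn << 12)) + reg;
    }
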
/arch/arm/mm/ioremap.c

    110  int ioremap_page(unsigned long virt, unsigned long phys,    in ioremap_page() argument
    113  return ioremap_page_range(virt, virt + PAGE_SIZE, phys,    in ioremap_page()
    148  static void unmap_area_sections(unsigned long virt, unsigned long size)    in unmap_area_sections() argument
    150  unsigned long addr = virt, end = virt + (size & ~(SZ_1M - 1));    in unmap_area_sections()
    184  flush_tlb_kernel_range(virt, end);    in unmap_area_sections()
    188  remap_area_sections(unsigned long virt, unsigned long pfn,    in remap_area_sections() argument
    191  unsigned long addr = virt, end = virt + size;    in remap_area_sections()
    198  unmap_area_sections(virt, size);    in remap_area_sections()
    215  remap_area_supersections(unsigned long virt, unsigned long pfn,    in remap_area_supersections() argument
    218  unsigned long addr = virt, end = virt + size;    in remap_area_supersections()
    [all …]

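Two details worth noting above: ioremap_page() is a one-page wrapper around ioremap_page_range(), and unmap_area_sections() rounds the size down to whole 1 MB units, because a first-level ARM section mapping covers exactly 1 MB. The rounding, standalone (section_end() is an illustrative name):

    /* end of the last whole 1 MB section inside [virt, virt + size) */
    static unsigned long section_end(unsigned long virt, unsigned long size)
    {
            return virt + (size & ~(SZ_1M - 1));
    }
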
/arch/riscv/crypto/aes-riscv64-glue.c

    147  aes_ecb_encrypt_zvkned(ctx, walk.src.virt.addr,    in riscv64_aes_ecb_crypt()
    148  walk.dst.virt.addr,    in riscv64_aes_ecb_crypt()
    151  aes_ecb_decrypt_zvkned(ctx, walk.src.virt.addr,    in riscv64_aes_ecb_crypt()
    152  walk.dst.virt.addr,    in riscv64_aes_ecb_crypt()
    185  aes_cbc_encrypt_zvkned(ctx, walk.src.virt.addr,    in riscv64_aes_cbc_crypt()
    186  walk.dst.virt.addr,    in riscv64_aes_cbc_crypt()
    190  aes_cbc_decrypt_zvkned(ctx, walk.src.virt.addr,    in riscv64_aes_cbc_crypt()
    191  walk.dst.virt.addr,    in riscv64_aes_cbc_crypt()
    259  aes_cbc_cts_crypt_zvkned(ctx, walk.src.virt.addr, walk.dst.virt.addr,    in riscv64_aes_cbc_cts_crypt()
    304  aes_ctr32_crypt_zvkned_zvkb(ctx, walk.src.virt.addr,    in riscv64_aes_ctr_crypt()
    [all …]

/arch/riscv/crypto/chacha-riscv64-glue.c

     41  chacha20_zvkb(ctx->key, walk.src.virt.addr,    in riscv64_chacha20_crypt()
     42  walk.dst.virt.addr, nbytes, iv);    in riscv64_chacha20_crypt()
     46  memcpy(block_buffer, walk.src.virt.addr + nbytes,    in riscv64_chacha20_crypt()
     50  memcpy(walk.dst.virt.addr + nbytes, block_buffer,    in riscv64_chacha20_crypt()

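Every crypto glue file in this listing (the riscv64 ones above, and the arm64, x86, arm, sparc and powerpc ones below) uses the same skcipher_walk idiom: the walk maps each scatterlist chunk and exposes it as contiguous kernel memory through walk.src.virt.addr and walk.dst.virt.addr. A generic sketch of the loop, with do_blocks() standing in for the arch-specific cipher routine (not a real kernel function):

    #include <crypto/aes.h>
    #include <crypto/internal/skcipher.h>

    static void do_blocks(u8 *dst, const u8 *src, unsigned int nbytes);

    static int ecb_crypt_sketch(struct skcipher_request *req)
    {
            struct skcipher_walk walk;
            unsigned int nbytes;
            int err;

            err = skcipher_walk_virt(&walk, req, false);
            while ((nbytes = walk.nbytes) != 0) {
                    /* whole blocks only; the remainder rolls into the next step */
                    nbytes -= nbytes % AES_BLOCK_SIZE;
                    do_blocks(walk.dst.virt.addr, walk.src.virt.addr, nbytes);
                    err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
            }
            return err;
    }

The chacha glue above additionally bounces a final partial block through block_buffer, since the stream cipher still has to produce a full block of keystream for the tail.
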
/arch/xtensa/mm/cache.c

    144  unsigned long virt;    in flush_dcache_folio() local
    158  virt = TLBTEMP_BASE_1 + (phys & DCACHE_ALIAS_MASK);    in flush_dcache_folio()
    159  __flush_invalidate_dcache_page_alias(virt, phys);    in flush_dcache_folio()
    161  virt = TLBTEMP_BASE_1 + (temp & DCACHE_ALIAS_MASK);    in flush_dcache_folio()
    164  __flush_invalidate_dcache_page_alias(virt, phys);    in flush_dcache_folio()
    167  __invalidate_icache_page_alias(virt, phys);    in flush_dcache_folio()
    203  unsigned long virt = TLBTEMP_BASE_1 + (address & DCACHE_ALIAS_MASK);    in local_flush_cache_page() local
    206  __flush_invalidate_dcache_page_alias(virt, phys);    in local_flush_cache_page()
    207  __invalidate_icache_page_alias(virt, phys);    in local_flush_cache_page()

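The TLBTEMP_BASE_1 + (addr & DCACHE_ALIAS_MASK) expression above is the classic fix for virtually-indexed, aliasing data caches: the kernel maps the physical page at a temporary virtual address that shares the cache-index ("color") bits of the original mapping, so flushing through the alias hits exactly the cache lines the other mapping filled. The address choice in isolation (alias_vaddr() is an illustrative name):

    /* temporary alias with the same cache-index bits as uaddr */
    static unsigned long alias_vaddr(unsigned long uaddr)
    {
            return TLBTEMP_BASE_1 + (uaddr & DCACHE_ALIAS_MASK);
    }
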
/arch/powerpc/kernel/exceptions-64s.S

     259  .macro GEN_BRANCH_TO_COMMON name, virt    argument
     265  .if \virt
     281  .macro GEN_INT_ENTRY name, virt, ool=0
     306  .if !\virt
     373  GEN_BRANCH_TO_COMMON \name \virt
    1030  GEN_INT_ENTRY system_reset, virt=0
    1056  GEN_INT_ENTRY system_reset, virt=0
    1161  GEN_INT_ENTRY machine_check_early, virt=0
    1168  GEN_INT_ENTRY machine_check_early, virt=0
    1306  GEN_INT_ENTRY machine_check, virt=0
    [all …]

/arch/powerpc/boot/oflib.c

    152  unsigned int of_claim(unsigned long virt, unsigned long size,    in of_claim() argument
    161  return of_call_prom("claim", 3, 1, virt, size, align);    in of_claim()
    164  align, size, virt);    in of_claim()
    168  align, size, virt);    in of_claim()
    171  0x12, size, virt, virt);    in of_claim()
    172  return virt;    in of_claim()

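of_claim() above drives the Open Firmware client interface: the "claim" service takes three inputs (virt, size, align) and returns one result, the address actually claimed. A minimal call through the boot wrapper's of_call_prom() helper, matching the usage on line 161:

    /* three inputs, one return value, per the IEEE 1275 client interface */
    static unsigned int claim(unsigned long virt, unsigned long size,
                              unsigned long align)
    {
            return of_call_prom("claim", 3, 1, virt, size, align);
    }
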
/arch/arm64/crypto/sm4-neon-glue.c

     47  const u8 *src = walk.src.virt.addr;    in sm4_ecb_do_crypt()
     48  u8 *dst = walk.dst.virt.addr;    in sm4_ecb_do_crypt()
     94  const u8 *src = walk.src.virt.addr;    in sm4_cbc_encrypt()
     95  u8 *dst = walk.dst.virt.addr;    in sm4_cbc_encrypt()
    125  const u8 *src = walk.src.virt.addr;    in sm4_cbc_decrypt()
    126  u8 *dst = walk.dst.virt.addr;    in sm4_cbc_decrypt()
    158  const u8 *src = walk.src.virt.addr;    in sm4_cfb_encrypt()
    159  u8 *dst = walk.dst.virt.addr;    in sm4_cfb_encrypt()
    196  const u8 *src = walk.src.virt.addr;    in sm4_cfb_decrypt()
    197  u8 *dst = walk.dst.virt.addr;    in sm4_cfb_decrypt()
    [all …]

/arch/arm64/crypto/aes-glue.c

    191  aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,    in ecb_encrypt()
    211  aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,    in ecb_decrypt()
    229  aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,    in cbc_encrypt_walk()
    258  aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr,    in cbc_decrypt_walk()
    327  aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,    in cts_cbc_encrypt()
    384  aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,    in cts_cbc_decrypt()
    420  aes_essiv_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,    in essiv_cbc_encrypt()
    442  aes_essiv_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,    in essiv_cbc_decrypt()
    462  const u8 *src = walk.src.virt.addr;    in xctr_encrypt()
    464  u8 *dst = walk.dst.virt.addr;    in xctr_encrypt()
    [all …]

/arch/arm64/crypto/sm4-ce-glue.c

    123  const u8 *src = walk.src.virt.addr;    in sm4_ecb_do_crypt()
    124  u8 *dst = walk.dst.virt.addr;    in sm4_ecb_do_crypt()
    171  const u8 *src = walk.src.virt.addr;    in sm4_cbc_crypt()
    172  u8 *dst = walk.dst.virt.addr;    in sm4_cbc_crypt()
    262  sm4_ce_cbc_cts_enc(ctx->rkey_enc, walk.dst.virt.addr,    in sm4_cbc_cts_crypt()
    263  walk.src.virt.addr, walk.iv, walk.nbytes);    in sm4_cbc_cts_crypt()
    265  sm4_ce_cbc_cts_dec(ctx->rkey_dec, walk.dst.virt.addr,    in sm4_cbc_cts_crypt()
    266  walk.src.virt.addr, walk.iv, walk.nbytes);    in sm4_cbc_cts_crypt()
    294  const u8 *src = walk.src.virt.addr;    in sm4_cfb_encrypt()
    295  u8 *dst = walk.dst.virt.addr;    in sm4_cfb_encrypt()
    [all …]

/arch/x86/crypto/aesni-intel_glue.c

    303  aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,    in ecb_encrypt()
    325  aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,    in ecb_decrypt()
    347  aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,    in cbc_encrypt()
    369  aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,    in cbc_decrypt()
    428  aesni_cts_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,    in cts_cbc_encrypt()
    484  aesni_cts_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,    in cts_cbc_decrypt()
    523  static_call(aesni_ctr_enc_tfm)(ctx, walk.dst.virt.addr,    in ctr_crypt()
    524  walk.src.virt.addr,    in ctr_crypt()
    531  crypto_xor_cpy(walk.dst.virt.addr + walk.nbytes - nbytes,    in ctr_crypt()
    532  walk.src.virt.addr + walk.nbytes - nbytes,    in ctr_crypt()
    [all …]

/arch/x86/crypto/sm4_aesni_avx_glue.c

     50  const u8 *src = walk.src.virt.addr;    in ecb_do_crypt()
     51  u8 *dst = walk.dst.virt.addr;    in ecb_do_crypt()
    105  const u8 *src = walk.src.virt.addr;    in sm4_cbc_encrypt()
    106  u8 *dst = walk.dst.virt.addr;    in sm4_cbc_encrypt()
    138  const u8 *src = walk.src.virt.addr;    in sm4_avx_cbc_decrypt()
    139  u8 *dst = walk.dst.virt.addr;    in sm4_avx_cbc_decrypt()
    204  const u8 *src = walk.src.virt.addr;    in sm4_cfb_encrypt()
    205  u8 *dst = walk.dst.virt.addr;    in sm4_cfb_encrypt()
    244  const u8 *src = walk.src.virt.addr;    in sm4_avx_cfb_decrypt()
    245  u8 *dst = walk.dst.virt.addr;    in sm4_avx_cfb_decrypt()
    [all …]

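The crypto_xor_cpy() lines in ctr_crypt() above handle CTR mode's final partial block: the keystream for the tail is XORed over the remaining source bytes straight into the destination. The same step in isolation, where ks would be the encrypted final counter block:

    #include <crypto/algapi.h>    /* crypto_xor_cpy() */

    /* dst[i] = src[i] ^ ks[i] for the last 'tail' bytes of the request */
    static void ctr_tail(u8 *dst, const u8 *src, const u8 *ks,
                         unsigned int tail)
    {
            crypto_xor_cpy(dst, src, ks, tail);
    }
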
/arch/um/kernel/physmem.c

     41  void map_memory(unsigned long virt, unsigned long phys, unsigned long len,    in map_memory() argument
     48  err = os_map_memory((void *) virt, fd, offset, len, r, w, x);    in map_memory()
     55  "err = %d\n", virt, fd, offset, len, r, w, x, err);    in map_memory()
    188  return region->virt;    in find_iomem()
    211  region->virt = iomem_start;    in setup_iomem()
    212  region->phys = __pa(region->virt);    in setup_iomem()

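map_memory() above is UML-specific: "physical" memory is backed by a host file, so mapping a guest-virtual range onto guest-physical memory is a host mmap of that file at the right offset. A simplified host-side sketch of what os_map_memory() boils down to (not the actual os layer code):

    #include <sys/mman.h>
    #include <sys/types.h>

    static int host_map(void *virt, int fd, off_t offset,
                        unsigned long len, int prot)
    {
            void *p = mmap(virt, len, prot, MAP_SHARED | MAP_FIXED,
                           fd, offset);

            return p == MAP_FAILED ? -1 : 0;
    }
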
/arch/arm/crypto/aes-ce-glue.c

    179  ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,    in ecb_encrypt()
    199  ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,    in ecb_decrypt()
    217  ce_aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,    in cbc_encrypt_walk()
    247  ce_aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr,    in cbc_decrypt_walk()
    317  ce_aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,    in cts_cbc_encrypt()
    375  ce_aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,    in cts_cbc_decrypt()
    394  ce_aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,    in ctr_encrypt()
    403  u8 *tdst = walk.dst.virt.addr;    in ctr_encrypt()
    404  u8 *tsrc = walk.src.virt.addr;    in ctr_encrypt()
    486  ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,    in xts_encrypt()
    [all …]

/arch/arm64/include/asm/mmu.h

     66  extern void __iomem *early_io_map(phys_addr_t phys, unsigned long virt);
     67  extern void create_mapping_noalloc(phys_addr_t phys, unsigned long virt,
     70  unsigned long virt, phys_addr_t size,

/arch/arm64/kvm/hyp/include/nvhe/memory.h

     79  #define hyp_virt_to_page(virt)  hyp_phys_to_page(__hyp_pa(virt))    argument
     80  #define hyp_virt_to_pfn(virt)   hyp_phys_to_pfn(__hyp_pa(virt))    argument

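The two nVHE macros above are simple compositions: __hyp_pa() converts an EL2 hypervisor virtual address to a physical address, after which the phys-to-page/pfn helpers apply. Conceptually, the pfn case reduces to shifting out the page offset (a sketch, not the kernel definition):

    /* pfn is the physical address with the in-page offset shifted out */
    #define sketch_hyp_virt_to_pfn(virt)  (__hyp_pa(virt) >> PAGE_SHIFT)
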
/arch/arm/mach-axxia/platsmp.c

     24  u32 *virt = (u32 *) phys_to_virt(release_phys);    in write_release_addr() local
     25  writel_relaxed(__pa_symbol(secondary_startup), virt);    in write_release_addr()
     28  __cpuc_flush_dcache_area(virt, sizeof(u32));    in write_release_addr()

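write_release_addr() above shows the common secondary-CPU release handshake: store the physical address of the entry point (via __pa_symbol()) at the agreed release location, then clean the cache line so a core that comes up with its caches off reads current data. The three matched lines assembled; the smp_wmb() is an assumed addition for store ordering, not visible in the matches:

    #include <linux/io.h>

    extern void secondary_startup(void);

    static void release_secondary(unsigned long release_phys)
    {
            u32 *virt = (u32 *)phys_to_virt(release_phys);

            writel_relaxed(__pa_symbol(secondary_startup), virt);
            smp_wmb();                                    /* order the store */
            __cpuc_flush_dcache_area(virt, sizeof(u32));  /* push to memory */
    }
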
/arch/um/include/shared/mem.h

     12  static inline unsigned long uml_to_phys(void *virt)    in uml_to_phys() argument
     14  return(((unsigned long) virt) - uml_physmem);    in uml_to_phys()

/arch/sparc/crypto/des_glue.c

    112  des_sparc64_ecb_crypt(walk.src.virt.addr, walk.dst.virt.addr,    in __ecb_crypt()
    154  des_sparc64_cbc_encrypt(walk.src.virt.addr,    in __cbc_crypt()
    155  walk.dst.virt.addr,    in __cbc_crypt()
    160  des_sparc64_cbc_decrypt(walk.src.virt.addr,    in __cbc_crypt()
    161  walk.dst.virt.addr,    in __cbc_crypt()
    263  des3_ede_sparc64_ecb_crypt(K, walk.src.virt.addr,    in __ecb3_crypt()
    264  walk.dst.virt.addr,    in __ecb3_crypt()
    310  des3_ede_sparc64_cbc_encrypt(K, walk.src.virt.addr,    in __cbc3_crypt()
    311  walk.dst.virt.addr,    in __cbc3_crypt()
    316  des3_ede_sparc64_cbc_decrypt(K, walk.src.virt.addr,    in __cbc3_crypt()
    [all …]

/arch/powerpc/crypto/aes-spe-glue.c

    196  ppc_encrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr,    in ppc_ecb_crypt()
    199  ppc_decrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr,    in ppc_ecb_crypt()
    235  ppc_encrypt_cbc(walk.dst.virt.addr, walk.src.virt.addr,    in ppc_cbc_crypt()
    239  ppc_decrypt_cbc(walk.dst.virt.addr, walk.src.virt.addr,    in ppc_cbc_crypt()
    276  ppc_crypt_ctr(walk.dst.virt.addr, walk.src.virt.addr,    in ppc_ctr_crypt()
    304  ppc_encrypt_xts(walk.dst.virt.addr, walk.src.virt.addr,    in ppc_xts_crypt()
    308  ppc_decrypt_xts(walk.dst.virt.addr, walk.src.virt.addr,    in ppc_xts_crypt()