/arch/xtensa/mm/
  cache.c
    141  unsigned long phys = page_to_phys(page);  in flush_dcache_page() [local]
    143  unsigned long alias = !(DCACHE_ALIAS_EQ(temp, phys));  in flush_dcache_page()
    157  virt = TLBTEMP_BASE_1 + (phys & DCACHE_ALIAS_MASK);  in flush_dcache_page()
    158  __flush_invalidate_dcache_page_alias(virt, phys);  in flush_dcache_page()
    163  __flush_invalidate_dcache_page_alias(virt, phys);  in flush_dcache_page()
    166  __invalidate_icache_page_alias(virt, phys);  in flush_dcache_page()
    198  unsigned long phys = page_to_phys(pfn_to_page(pfn));  in local_flush_cache_page() [local]
    202  __flush_invalidate_dcache_page_alias(virt, phys);  in local_flush_cache_page()
    203  __invalidate_icache_page_alias(virt, phys);  in local_flush_cache_page()
    228  unsigned long phys = page_to_phys(page);  in update_mmu_cache() [local]
    [all …]
/arch/m68k/mm/
  sun3kmap.c
    29  static inline void do_page_mapin(unsigned long phys, unsigned long virt,  in do_page_mapin() [argument]
    35  ptep = pfn_pte(phys >> PAGE_SHIFT, PAGE_KERNEL);  in do_page_mapin()
    48  static inline void do_pmeg_mapin(unsigned long phys, unsigned long virt,  in do_pmeg_mapin() [argument]
    56  do_page_mapin(phys, virt, type);  in do_pmeg_mapin()
    57  phys += PAGE_SIZE;  in do_pmeg_mapin()
    63  void __iomem *sun3_ioremap(unsigned long phys, unsigned long size,  in sun3_ioremap() [argument]
    74  offset = phys & (PAGE_SIZE-1);  in sun3_ioremap()
    75  phys &= ~(PAGE_SIZE-1);  in sun3_ioremap()
    98  do_pmeg_mapin(phys, virt, type, seg_pages);  in sun3_ioremap()
    101  phys += seg_pages * PAGE_SIZE;  in sun3_ioremap()
    [all …]
/arch/um/kernel/
  physmem.c
    41  void map_memory(unsigned long virt, unsigned long phys, unsigned long len,  in map_memory() [argument]
    47  fd = phys_mapping(phys, &offset);  in map_memory()
    118  int phys_mapping(unsigned long phys, unsigned long long *offset_out)  in phys_mapping() [argument]
    122  if (phys < physmem_size) {  in phys_mapping()
    124  *offset_out = phys;  in phys_mapping()
    126  else if (phys < __pa(end_iomem)) {  in phys_mapping()
    130  if ((phys >= region->phys) &&  in phys_mapping()
    131  (phys < region->phys + region->size)) {  in phys_mapping()
    133  *offset_out = phys - region->phys;  in phys_mapping()
    139  else if (phys < __pa(end_iomem) + highmem) {  in phys_mapping()
    [all …]
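The phys_mapping() hits above follow a common shape: a physical address below physmem_size maps straight into the physmem file at the same offset, otherwise the iomem regions are scanned for a containing range and the offset within that region is reported. A minimal userspace sketch of that lookup, with an illustrative struct and return convention rather than UML's actual types:

    struct iomem_region {
        unsigned long phys;     /* start of the region's physical range */
        unsigned long size;     /* length of the region in bytes        */
        int fd;                 /* file descriptor backing the region   */
    };

    /* Return the backing fd for a physical address and report the offset
     * into that backing object; -1 if the address is not mapped anywhere. */
    static int lookup_phys_mapping(const struct iomem_region *regions, int nregions,
                                   unsigned long physmem_size, int physmem_fd,
                                   unsigned long phys, unsigned long long *offset_out)
    {
        int i;

        if (phys < physmem_size) {              /* ordinary RAM */
            *offset_out = phys;
            return physmem_fd;
        }
        for (i = 0; i < nregions; i++) {        /* iomem regions */
            if (phys >= regions[i].phys &&
                phys < regions[i].phys + regions[i].size) {
                *offset_out = phys - regions[i].phys;
                return regions[i].fd;
            }
        }
        return -1;
    }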
/arch/arm64/boot/dts/microchip/
  sparx5_pcb135_board.dtsi
    377  phys = <&serdes 13>;
    384  phys = <&serdes 13>;
    391  phys = <&serdes 13>;
    398  phys = <&serdes 13>;
    405  phys = <&serdes 14>;
    412  phys = <&serdes 14>;
    419  phys = <&serdes 14>;
    426  phys = <&serdes 14>;
    433  phys = <&serdes 15>;
    440  phys = <&serdes 15>;
    [all …]
/arch/powerpc/mm/nohash/
  fsl_booke.c
    60  phys_addr_t phys;  [member]
    77  return tlbcam_addrs[b].phys + (va - tlbcam_addrs[b].start);  in v_block_mapped()
    88  if (pa >= tlbcam_addrs[b].phys  in p_block_mapped()
    90  +tlbcam_addrs[b].phys)  in p_block_mapped()
    91  return tlbcam_addrs[b].start+(pa-tlbcam_addrs[b].phys);  in p_block_mapped()
    103  static void settlbcam(int index, unsigned long virt, phys_addr_t phys,  in settlbcam() [argument]
    125  TLBCAM[index].MAS3 = (phys & MAS3_RPN) | MAS3_SX | MAS3_SR;  in settlbcam()
    128  TLBCAM[index].MAS7 = (u64)phys >> 32;  in settlbcam()
    138  tlbcam_addrs[index].phys = phys;  in settlbcam()
    142  phys_addr_t phys)  in calc_cam_sz() [argument]
    [all …]
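v_block_mapped() and p_block_mapped() above translate between virtual and physical addresses by scanning the recorded tlbcam_addrs[] windows and applying the window's offset. A self-contained sketch of the virtual-to-physical direction, with an illustrative struct standing in for tlbcam_addrs[]:

    struct block_map {
        unsigned long virt_start;       /* first mapped virtual address */
        unsigned long size;             /* length of the block          */
        unsigned long long phys;        /* physical base of the block   */
    };

    /* Translate a virtual address that falls inside one of the recorded
     * block mappings; returns 0 when no block covers it. */
    static unsigned long long virt_to_block_phys(const struct block_map *map,
                                                 int nmaps, unsigned long va)
    {
        int b;

        for (b = 0; b < nmaps; b++)
            if (va >= map[b].virt_start &&
                va < map[b].virt_start + map[b].size)
                return map[b].phys + (va - map[b].virt_start);
        return 0;
    }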
/arch/arm64/kvm/hyp/include/nvhe/
  memory.h
    26  #define __hyp_va(phys) ((void *)((phys_addr_t)(phys) - hyp_physvirt_offset))  [argument]
    28  static inline void *hyp_phys_to_virt(phys_addr_t phys)  in hyp_phys_to_virt() [argument]
    30  return __hyp_va(phys);  in hyp_phys_to_virt()
    38  #define hyp_phys_to_pfn(phys) ((phys) >> PAGE_SHIFT)  [argument]
    40  #define hyp_phys_to_page(phys) (&hyp_vmemmap[hyp_phys_to_pfn(phys)])  [argument]
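The memory.h macros above encode a simple linear mapping: a hypervisor virtual address is the physical address minus hyp_physvirt_offset, and a page frame number is the physical address shifted right by PAGE_SHIFT. A standalone sketch of the same arithmetic; the PAGE_SHIFT value and the offset variable here are illustrative stand-ins, not the hypervisor's real state:

    #include <stdint.h>

    #define PAGE_SHIFT 12                   /* illustrative: 4 KiB pages */

    /* Signed difference between physical and hyp virtual addresses. */
    static intptr_t hyp_physvirt_offset;

    static inline void *hyp_phys_to_virt(uint64_t phys)
    {
        return (void *)(uintptr_t)(phys - hyp_physvirt_offset);
    }

    static inline uint64_t hyp_phys_to_pfn(uint64_t phys)
    {
        return phys >> PAGE_SHIFT;
    }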
/arch/powerpc/mm/kasan/
  book3s_32.c
    15  phys_addr_t phys;  in kasan_init_region() [local]
    26  phys = memblock_phys_alloc_range(k_size, k_size, 0,  in kasan_init_region()
    28  if (!phys)  in kasan_init_region()
    31  setbat(idx, k_nobat, phys, k_size, PAGE_KERNEL);  in kasan_init_region()
    38  phys = memblock_phys_alloc_range(k_end - k_nobat, PAGE_SIZE, 0,  in kasan_init_region()
    40  if (!phys)  in kasan_init_region()
    52  pte_t pte = pfn_pte(PHYS_PFN(phys + k_cur - k_nobat), PAGE_KERNEL);  in kasan_init_region()
/arch/arm64/kernel/
  acpi.c
    94  void __init __iomem *__acpi_map_table(unsigned long phys, unsigned long size)  in __acpi_map_table() [argument]
    99  return early_memremap(phys, size);  in __acpi_map_table()
    276  void __iomem *acpi_os_ioremap(acpi_physical_address phys, acpi_size size)  in acpi_os_ioremap() [argument]
    287  if (phys < md->phys_addr || phys >= end)  in acpi_os_ioremap()
    290  if (phys + size > end) {  in acpi_os_ioremap()
    313  if (memblock_is_map_memory(phys) ||  in acpi_os_ioremap()
    314  !memblock_is_region_memory(phys, size)) {  in acpi_os_ioremap()
    315  pr_warn(FW_BUG "requested region covers kernel memory @ %pa\n", &phys);  in acpi_os_ioremap()
    348  if (memblock_is_map_memory(phys))  in acpi_os_ioremap()
    349  return (void __iomem *)__phys_to_virt(phys);  in acpi_os_ioremap()
    [all …]
/arch/sh/include/asm/
  mmu.h
    62  int pmb_bolt_mapping(unsigned long virt, phys_addr_t phys,
    64  void __iomem *pmb_remap_caller(phys_addr_t phys, unsigned long size,
    71  pmb_bolt_mapping(unsigned long virt, phys_addr_t phys,  in pmb_bolt_mapping() [argument]
    78  pmb_remap_caller(phys_addr_t phys, unsigned long size,  in pmb_remap_caller() [argument]
    100  pmb_remap(phys_addr_t phys, unsigned long size, pgprot_t prot)  in pmb_remap() [argument]
    102  return pmb_remap_caller(phys, size, prot, __builtin_return_address(0));  in pmb_remap()
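pmb_remap() above is a thin wrapper that captures its own call site with __builtin_return_address(0) and forwards it to pmb_remap_caller(), so the PMB code can attribute each mapping to its requester. A sketch of that caller-tracking pattern with invented function names and bodies, not the SH implementation:

    #include <stdio.h>

    /* Worker that receives the address of the call site requesting the
     * mapping; a real implementation would set up the mapping here. */
    static void *remap_caller(unsigned long phys, unsigned long size, void *caller)
    {
        printf("remap of %#lx (+%#lx bytes) requested from %p\n",
               phys, size, caller);
        return NULL;
    }

    /* Public helper: record who asked by passing our own return address. */
    static inline void *remap(unsigned long phys, unsigned long size)
    {
        return remap_caller(phys, size, __builtin_return_address(0));
    }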
/arch/sh/boards/mach-sdk7786/
  sram.c
    20  unsigned long phys;  in fpga_sram_init() [local]
    44  phys = (area << 26) + SZ_64M - SZ_4K;  in fpga_sram_init()
    50  vaddr = ioremap(phys, SZ_2K);  in fpga_sram_init()
    58  SZ_2K >> 10, phys, phys + SZ_2K - 1, area);  in fpga_sram_init()
/arch/arm64/kvm/hyp/nvhe/
  mm.c
    31  unsigned long phys, enum kvm_pgtable_prot prot)  in __pkvm_create_mappings() [argument]
    36  err = kvm_pgtable_hyp_map(&pkvm_pgtable, start, size, phys, prot);  in __pkvm_create_mappings()
    58  unsigned long __pkvm_create_private_mapping(phys_addr_t phys, size_t size,  in __pkvm_create_private_mapping() [argument]
    66  size = size + offset_in_page(phys);  in __pkvm_create_private_mapping()
    71  err = kvm_pgtable_hyp_map(&pkvm_pgtable, addr, size, phys, prot);  in __pkvm_create_private_mapping()
    77  addr = addr + offset_in_page(phys);  in __pkvm_create_private_mapping()
    89  phys_addr_t phys;  in pkvm_create_mappings_locked() [local]
    99  phys = hyp_virt_to_phys((void *)virt_addr);  in pkvm_create_mappings_locked()
    101  phys, prot);  in pkvm_create_mappings_locked()
    183  phys_addr_t phys;  in hyp_map_vectors() [local]
    [all …]
  setup.c
    73  static int recreate_hyp_mappings(phys_addr_t phys, unsigned long size,  in recreate_hyp_mappings() [argument]
    77  void *start, *end, *virt = hyp_phys_to_virt(phys);  in recreate_hyp_mappings()
    195  phys_addr_t phys;  in fix_host_ownership_walker() [local]
    203  phys = kvm_pte_to_phys(pte);  in fix_host_ownership_walker()
    204  if (!addr_is_memory(phys))  in fix_host_ownership_walker()
    214  return host_stage2_set_owner_locked(phys, PAGE_SIZE, pkvm_hyp_id);  in fix_host_ownership_walker()
    225  return host_stage2_idmap_locked(phys, PAGE_SIZE, prot, false);  in fix_host_ownership_walker()
    335  int __pkvm_init(phys_addr_t phys, unsigned long size, unsigned long nr_cpus,  in __pkvm_init() [argument]
    339  void *virt = hyp_phys_to_virt(phys);  in __pkvm_init()
    345  if (!PAGE_ALIGNED(phys) || !PAGE_ALIGNED(size))  in __pkvm_init()
    [all …]
/arch/x86/events/intel/
  bts.c
    148  struct bts_phys *phys = &buf->buf[buf->cur_buf];  in bts_config_buffer() [local]
    149  unsigned long index, thresh = 0, end = phys->size;  in bts_config_buffer()
    150  struct page *page = phys->page;  in bts_config_buffer()
    155  if (buf->end < phys->offset + buf_size(page))  in bts_config_buffer()
    156  end = buf->end - phys->offset - phys->displacement;  in bts_config_buffer()
    158  index -= phys->offset + phys->displacement;  in bts_config_buffer()
    168  ds->bts_buffer_base = (u64)(long)page_address(page) + phys->displacement;  in bts_config_buffer()
    176  static void bts_buffer_pad_out(struct bts_phys *phys, unsigned long head)  in bts_buffer_pad_out() [argument]
    178  unsigned long index = head - phys->offset;  in bts_buffer_pad_out()
    180  memset(page_address(phys->page) + index, 0, phys->size - index);  in bts_buffer_pad_out()
    [all …]
/arch/arm64/mm/
  mem_encrypt.c
    52  static int arm_smccc_share_unshare_page(u32 func_id, phys_addr_t phys)  in arm_smccc_share_unshare_page() [argument]
    54  phys_addr_t end = phys + PAGE_SIZE;  in arm_smccc_share_unshare_page()
    56  while (phys < end) {  in arm_smccc_share_unshare_page()
    59  arm_smccc_1_1_invoke(func_id, phys, 0, 0, &res);  in arm_smccc_share_unshare_page()
    63  phys += memshare_granule_sz;  in arm_smccc_share_unshare_page()
  mmu.c
    97  phys_addr_t phys;  in early_pgtable_alloc() [local]
    100  phys = memblock_phys_alloc_range(PAGE_SIZE, PAGE_SIZE, 0,  in early_pgtable_alloc()
    102  if (!phys)  in early_pgtable_alloc()
    110  ptr = pte_set_fixmap(phys);  in early_pgtable_alloc()
    120  return phys;  in early_pgtable_alloc()
    158  phys_addr_t phys, pgprot_t prot)  in init_pte() [argument]
    166  set_pte(ptep, pfn_pte(__phys_to_pfn(phys), prot));  in init_pte()
    175  phys += PAGE_SIZE;  in init_pte()
    182  unsigned long end, phys_addr_t phys,  in alloc_init_cont_pte() [argument]
    210  if ((((addr | next | phys) & ~CONT_PTE_MASK) == 0) &&  in alloc_init_cont_pte()
    [all …]
/arch/x86/include/asm/
  fixmap.h
    161  phys_addr_t phys, pgprot_t flags);
    165  phys_addr_t phys, pgprot_t flags)  in __set_fixmap() [argument]
    167  native_set_fixmap(idx, phys, flags);  in __set_fixmap()
    193  #define __late_set_fixmap(idx, phys, flags) __set_fixmap(idx, phys, flags)  [argument]
    197  phys_addr_t phys, pgprot_t flags);
/arch/mips/n64/
  init.c
    89  unsigned long phys;  in n64_platform_init() [local]
    125  phys = virt_to_phys(orig);  in n64_platform_init()
    126  phys += 63;  in n64_platform_init()
    127  phys &= ~63;  in n64_platform_init()
    131  n64rdp_write_reg(i, phys);  in n64_platform_init()
    140  res[0].start = phys;  in n64_platform_init()
    141  res[0].end = phys + W * H * 2 - 1;  in n64_platform_init()
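The "phys += 63; phys &= ~63;" pair above rounds the physical address up to the next 64-byte boundary with the usual add-then-mask trick. The same operation as a standalone helper, purely for illustration:

    /* Round a physical address up to the next 64-byte boundary:
     * adding (align - 1) pushes any partially aligned value past the
     * boundary, and masking clears the low bits back down to it. */
    static unsigned long align_up_64(unsigned long phys)
    {
        phys += 63;
        phys &= ~63UL;
        return phys;
    }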
/arch/um/include/shared/
  mem.h
    9  extern int phys_mapping(unsigned long phys, unsigned long long *offset_out);
    17  static inline void *to_virt(unsigned long phys)  in to_virt() [argument]
    19  return((void *) uml_physmem + phys);  in to_virt()
  mem_user.h
    40  unsigned long phys;  [member]
    58  extern unsigned long phys_offset(unsigned long phys);
    59  extern void map_memory(unsigned long virt, unsigned long phys,
/arch/arm/mach-davinci/include/mach/
  uncompress.h
    52  static inline void set_uart_info(u32 phys)  in set_uart_info() [argument]
    54  uart = (u32 *)phys;  in set_uart_info()
    57  #define _DEBUG_LL_ENTRY(machine, phys) \  [argument]
    60  set_uart_info(phys); \
/arch/arm64/boot/dts/marvell/
  cn9130-crb-B.dts
    17  phys = <&cp0_comphy0 0>;
    30  phys = <&cp0_comphy2 0>;
    38  phys = <&cp0_comphy1 0>;
    45  phys = <&cp0_comphy3 1>;
  armada-8040-db.dts
    106  phys = <&cp0_comphy0 0>;
    113  phys = <&cp0_comphy5 2>;
    147  phys = <&cp0_comphy1 0>;
    151  phys = <&cp0_comphy3 1>;
    163  phys = <&cp0_utmi0>;
    178  phys = <&cp0_comphy4 1>, <&cp0_utmi1>;
    214  phys = <&cp1_comphy0 0>;
    221  phys = <&cp1_comphy4 1>;
    228  phys = <&cp1_comphy5 2>;
    308  phys = <&cp1_comphy1 0>;
    [all …]
/arch/parisc/kernel/
  kexec.c
    12  unsigned long phys);
    76  unsigned long phys);  in machine_kexec()
    78  unsigned long phys = page_to_phys(image->control_code_page);  in machine_kexec() [local]
    82  set_fixmap(FIX_TEXT_KEXEC, phys);  in machine_kexec()
    105  reloc(image->head & PAGE_MASK, image->start, phys);  in machine_kexec()
/arch/um/include/asm/
  page.h
    47  #define pte_set_val(p, phys, prot) \  [argument]
    48  ({ (p).pte = (phys) | pgprot_val(prot); })
    74  #define pte_set_val(p, phys, prot) (p).pte = (phys | pgprot_val(prot))  [argument]
    106  #define __va(phys) to_virt((unsigned long) (phys))  [argument]
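to_virt() in mem.h and the __va() macro above reflect UML's fixed-offset view of "physical" memory: a physical address is just an offset from the uml_physmem virtual base, so the conversion is plain addition. A standalone sketch of that pattern; the base value below is invented for illustration and the reverse helper is an assumption, not copied from the header:

    #include <stdint.h>

    /* Invented base address purely for illustration. */
    static uintptr_t uml_physmem = 0x60000000;

    static inline void *to_virt(unsigned long phys)
    {
        return (void *)(uml_physmem + phys);
    }

    static inline unsigned long to_phys(void *virt)
    {
        return (uintptr_t)virt - uml_physmem;
    }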
/arch/powerpc/mm/book3s32/
  mmu.c
    46  phys_addr_t phys;  [member]
    61  return bat_addrs[b].phys + (va - bat_addrs[b].start);  in v_block_mapped()
    72  if (pa >= bat_addrs[b].phys  in p_block_mapped()
    74  +bat_addrs[b].phys)  in p_block_mapped()
    75  return bat_addrs[b].start+(pa-bat_addrs[b].phys);  in p_block_mapped()
    117  static void setibat(int index, unsigned long virt, phys_addr_t phys,  in setibat() [argument]
    130  bat[0].batl = BAT_PHYS_ADDR(phys) | wimgxpp;  in setibat()
    256  void __init setbat(int index, unsigned long virt, phys_addr_t phys,  in setbat() [argument]
    268  (unsigned long long)phys);  in setbat()
    283  bat[1].batl = BAT_PHYS_ADDR(phys) | wimgxpp;  in setbat()
    [all …]