
Searched refs: __pa (Results 1 – 25 of 320) sorted by relevance


/arch/xtensa/kernel/
setup.c
463 initrd_is_mapped = mem_reserve(__pa(initrd_start), in setup_arch()
464 __pa(initrd_end), 0) == 0; in setup_arch()
471 mem_reserve(__pa(&_stext),__pa(&_end), 1); in setup_arch()
473 mem_reserve(__pa(&_WindowVectors_text_start), in setup_arch()
474 __pa(&_WindowVectors_text_end), 0); in setup_arch()
476 mem_reserve(__pa(&_DebugInterruptVector_literal_start), in setup_arch()
477 __pa(&_DebugInterruptVector_text_end), 0); in setup_arch()
479 mem_reserve(__pa(&_KernelExceptionVector_literal_start), in setup_arch()
480 __pa(&_KernelExceptionVector_text_end), 0); in setup_arch()
482 mem_reserve(__pa(&_UserExceptionVector_literal_start), in setup_arch()
[all …]
/arch/parisc/kernel/
firmware.c
158 __pa(pdc_result), 0); in set_firmware_width_unlocked()
242 __pa(pdc_result), __pa(pdc_result2), len); in pdc_chassis_info()
266 retval = mem_pdc_call(PDC_PAT_CHASSIS_LOG, PDC_PAT_CHASSIS_WRITE_LOG, __pa(&state), __pa(&data)); in pdc_pat_chassis_send_log()
299 retval = mem_pdc_call(PDC_CHASSIS, PDC_CHASSIS_WARN, __pa(pdc_result)); in pdc_chassis_warn()
310 ret = mem_pdc_call(PDC_COPROC, PDC_COPROC_CFG, __pa(pdc_result)); in pdc_coproc_cfg_unlocked()
357 retval = mem_pdc_call(PDC_IODC, PDC_IODC_READ, __pa(pdc_result), hpa, in pdc_iodc_read()
358 index, __pa(pdc_result2), iodc_data_size); in pdc_iodc_read()
384 retval = mem_pdc_call(PDC_SYSTEM_MAP, PDC_FIND_MODULE, __pa(pdc_result), in pdc_system_map_find_mods()
385 __pa(pdc_result2), mod_index); in pdc_system_map_find_mods()
411 retval = mem_pdc_call(PDC_SYSTEM_MAP, PDC_FIND_ADDRESS, __pa(pdc_result), in pdc_system_map_find_addrs()
[all …]
/arch/ia64/hp/sim/boot/
fw-emu.c
284 efi_systab->fw_vendor = __pa("H\0e\0w\0l\0e\0t\0t\0-\0P\0a\0c\0k\0a\0r\0d\0\0"); in sys_fw_init()
286 efi_systab->runtime = (void *) __pa(efi_runtime); in sys_fw_init()
288 efi_systab->tables = __pa(efi_tables); in sys_fw_init()
293 efi_runtime->get_time = (void *)__pa(&fw_efi_get_time); in sys_fw_init()
294 efi_runtime->set_time = (void *)__pa(&efi_unimplemented); in sys_fw_init()
295 efi_runtime->get_wakeup_time = (void *)__pa(&efi_unimplemented); in sys_fw_init()
296 efi_runtime->set_wakeup_time = (void *)__pa(&efi_unimplemented); in sys_fw_init()
297 efi_runtime->set_virtual_address_map = (void *)__pa(&efi_unimplemented); in sys_fw_init()
298 efi_runtime->get_variable = (void *)__pa(&efi_unimplemented); in sys_fw_init()
299 efi_runtime->get_next_variable = (void *)__pa(&efi_unimplemented); in sys_fw_init()
[all …]
/arch/arm/mach-omap2/
omap-secure.c
54 outer_clean_range(__pa(param), __pa(param + 5)); in omap_secure_dispatcher()
55 ret = omap_smc2(idx, flag, __pa(param)); in omap_secure_dispatcher()
108 outer_clean_range(__pa(param), __pa(param + 5)); in rx51_secure_dispatcher()
109 ret = omap_smc3(idx, process, flag, __pa(param)); in rx51_secure_dispatcher()
/arch/score/kernel/
setup.c
49 start_pfn = PFN_UP(__pa(&_end)); in bootmem_init()
73 if (__pa(initrd_end) > PFN_PHYS(max_low_pfn)) { in bootmem_init()
79 reserve_bootmem(__pa(initrd_start), size, BOOTMEM_DEFAULT); in bootmem_init()
95 code_resource.start = __pa(&_text); in resource_init()
96 code_resource.end = __pa(&_etext) - 1; in resource_init()
97 data_resource.start = __pa(&_etext); in resource_init()
98 data_resource.end = __pa(&_edata) - 1; in resource_init()
/arch/parisc/mm/
init.c
268 bootmap_start_pfn = PAGE_ALIGN(__pa((unsigned long) &_end)) >> PAGE_SHIFT; in setup_bootmem()
328 reserve_bootmem_node(NODE_DATA(0), __pa(KERNEL_BINARY_TEXT_START), in setup_bootmem()
350 if (__pa(initrd_start) < mem_max) { in setup_bootmem()
353 if (__pa(initrd_end) > mem_max) { in setup_bootmem()
354 initrd_reserve = mem_max - __pa(initrd_start); in setup_bootmem()
359 …printk(KERN_INFO "initrd: reserving %08lx-%08lx (mem_max %08lx)\n", __pa(initrd_start), __pa(initr… in setup_bootmem()
361 reserve_bootmem_node(NODE_DATA(0), __pa(initrd_start), in setup_bootmem()
415 ro_start = __pa((unsigned long)_text); in map_pages()
416 ro_end = __pa((unsigned long)&data_start); in map_pages()
417 fv_addr = __pa((unsigned long)&fault_vector_20) & PAGE_MASK; in map_pages()
[all …]
/arch/x86/include/asm/
pgalloc.h
65 paravirt_alloc_pte(mm, __pa(pte) >> PAGE_SHIFT); in pmd_populate_kernel()
66 set_pmd(pmd, __pmd(__pa(pte) | _PAGE_TABLE)); in pmd_populate_kernel()
114 paravirt_alloc_pmd(mm, __pa(pmd) >> PAGE_SHIFT); in pud_populate()
115 set_pud(pud, __pud(_PAGE_TABLE | __pa(pmd))); in pud_populate()
122 paravirt_alloc_pud(mm, __pa(pud) >> PAGE_SHIFT); in pgd_populate()
123 set_pgd(pgd, __pgd(_PAGE_TABLE | __pa(pud))); in pgd_populate()
page.h
40 #define __pa(x) __phys_addr((unsigned long)(x)) macro
57 #define __boot_pa(x) __pa(x)
63 #define virt_to_page(kaddr) pfn_to_page(__pa(kaddr) >> PAGE_SHIFT)
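
The page.h hits above are the x86 definitions themselves: __pa() maps a kernel virtual address in the direct (lowmem) mapping to its physical address, and virt_to_page() reuses it by shifting the result down to a page frame number. As a minimal illustration (not part of these results, and only valid for linearly mapped addresses such as a kmalloc() buffer, never for vmalloc space), the usual round trip looks like this:

#include <linux/mm.h>
#include <linux/slab.h>

/* Sketch only: shows the usual __pa()/__va()/virt_to_page() relationships
 * on a direct-mapped (lowmem) allocation. Not valid for vmalloc addresses. */
static void pa_roundtrip_example(void)
{
	void *buf = kmalloc(PAGE_SIZE, GFP_KERNEL);	/* lowmem, linearly mapped */
	phys_addr_t phys;
	struct page *page;

	if (!buf)
		return;

	phys = __pa(buf);			/* virtual -> physical */
	page = virt_to_page(buf);		/* same pfn: __pa(buf) >> PAGE_SHIFT */

	WARN_ON(__va(phys) != buf);		/* __va() inverts __pa() for lowmem */
	WARN_ON(page_to_pfn(page) != phys >> PAGE_SHIFT);

	kfree(buf);
}
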
/arch/um/kernel/
physmem.c
64 int pfn = PFN_UP(__pa(reserve_end)); in setup_physmem()
84 os_seek_file(physmem_fd, __pa(&__syscall_stub_start)); in setup_physmem()
89 free_bootmem(__pa(reserve_end) + bootmap_size, in setup_physmem()
101 else if (phys < __pa(end_iomem)) { in phys_mapping()
114 else if (phys < __pa(end_iomem) + highmem) { in phys_mapping()
186 region->phys = __pa(region->virt); in setup_iomem()
/arch/x86/kernel/
machine_kexec_32.c
110 set_pgd(pgd, __pgd(__pa(pmd) | _PAGE_PRESENT)); in machine_kexec_page_table_set_one()
115 set_pmd(pmd, __pmd(__pa(pte) | _PAGE_TABLE)); in machine_kexec_page_table_set_one()
131 (unsigned long)control_page, __pa(control_page)); in machine_kexec_prepare_page_tables()
137 __pa(control_page), __pa(control_page)); in machine_kexec_prepare_page_tables()
221 page_list[PA_CONTROL_PAGE] = __pa(control_page); in machine_kexec()
223 page_list[PA_PGD] = __pa(image->arch.pgd); in machine_kexec()
espfix_64.c
177 pud = __pud(__pa(pmd_p) | (PGTABLE_PROT & ptemask)); in init_espfix_ap()
178 paravirt_alloc_pmd(&init_mm, __pa(pmd_p) >> PAGE_SHIFT); in init_espfix_ap()
187 pmd = __pmd(__pa(pte_p) | (PGTABLE_PROT & ptemask)); in init_espfix_ap()
188 paravirt_alloc_pte(&init_mm, __pa(pte_p) >> PAGE_SHIFT); in init_espfix_ap()
195 pte = __pte(__pa(stack_page) | (__PAGE_KERNEL_RO & ptemask)); in init_espfix_ap()
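
The pgalloc.h, machine_kexec_32.c and espfix_64.c hits all follow one pattern: a newly allocated lower-level page table is referenced from the level above by its physical address, so its virtual address is passed through __pa() before the table protection bits are OR-ed in. A simplified, x86-flavoured sketch of that pattern (example_populate_pmd is a hypothetical helper, not code from the hits):

#include <linux/mm.h>
#include <asm/pgtable.h>

/* Sketch only: allocate a zeroed page for a PTE table and point the PMD
 * entry at its *physical* address, as in the hits above. */
static int example_populate_pmd(pmd_t *pmd)
{
	pte_t *pte = (pte_t *)get_zeroed_page(GFP_KERNEL);

	if (!pte)
		return -ENOMEM;

	/* _PAGE_TABLE carries the present/write/user bits for a table entry. */
	set_pmd(pmd, __pmd(__pa(pte) | _PAGE_TABLE));
	return 0;
}
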
/arch/powerpc/kernel/
suspend.c
20 unsigned long nosave_begin_pfn = __pa(&__nosave_begin) >> PAGE_SHIFT; in pfn_is_nosave()
21 unsigned long nosave_end_pfn = PAGE_ALIGN(__pa(&__nosave_end)) >> PAGE_SHIFT; in pfn_is_nosave()
/arch/hexagon/include/asm/
page.h
99 #define __pa(x) ((unsigned long)(x) - PAGE_OFFSET + PHYS_OFFSET) macro
106 #define virt_to_page(kaddr) pfn_to_page(PFN_DOWN(__pa(kaddr)))
113 #define virt_addr_valid(kaddr) pfn_valid(__pa(kaddr) >> PAGE_SHIFT)
143 #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)
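
On Hexagon (and similarly on m32r and arc further down) __pa() is plain linear arithmetic: subtract the virtual base of the kernel mapping (PAGE_OFFSET) and add the physical base of RAM (PHYS_OFFSET). A standalone demonstration of that arithmetic with made-up placeholder constants (the real values are configuration dependent):

#include <stdio.h>

/* Illustration only: the PAGE_OFFSET/PHYS_OFFSET values below are
 * placeholders, not the real platform configuration. */
#define EXAMPLE_PAGE_OFFSET 0xc0000000UL	/* virtual base of the linear map */
#define EXAMPLE_PHYS_OFFSET 0x40000000UL	/* physical base of RAM */
#define EXAMPLE_PAGE_SHIFT 12

/* Mirrors the hexagon-style __pa(): a fixed linear offset between the
 * kernel virtual mapping and physical RAM. */
static unsigned long example_pa(unsigned long vaddr)
{
	return vaddr - EXAMPLE_PAGE_OFFSET + EXAMPLE_PHYS_OFFSET;
}

int main(void)
{
	unsigned long vaddr = 0xc0123000UL;
	unsigned long paddr = example_pa(vaddr);

	printf("virt 0x%lx -> phys 0x%lx (pfn %lu)\n",
	       vaddr, paddr, paddr >> EXAMPLE_PAGE_SHIFT);
	return 0;
}
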
/arch/cris/include/arch-v10/arch/
page.h
21 #define __pa(x) ((unsigned long)(x) & 0xdfffffff) macro
25 #define __pa(x) ((unsigned long)(x) & 0x7fffffff) macro
/arch/sh/kernel/
swsusp.c
24 unsigned long begin_pfn = __pa(&__nosave_begin) >> PAGE_SHIFT; in pfn_is_nosave()
25 unsigned long end_pfn = PAGE_ALIGN(__pa(&__nosave_end)) >> PAGE_SHIFT; in pfn_is_nosave()
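
The suspend.c, swsusp.c and cpu.c hits on powerpc, sh, mips and s390 are variants of the same hibernation helper: __pa() converts the .nosave section's linker symbols to physical addresses, which are then reduced to page frame numbers. A sketch of that shape, simplified from the hits (the extern declarations are added here so it stands alone):

#include <linux/mm.h>

/* Linker-script section bounds; normally picked up from asm/sections.h
 * or declared by the architecture. */
extern char __nosave_begin[], __nosave_end[];

/* Same shape as the hits above: the begin pfn is truncated (rounded down)
 * and the end address is page-aligned upward so the whole section is covered. */
int pfn_is_nosave(unsigned long pfn)
{
	unsigned long begin_pfn = __pa(__nosave_begin) >> PAGE_SHIFT;
	unsigned long end_pfn = PAGE_ALIGN(__pa(__nosave_end)) >> PAGE_SHIFT;

	return pfn >= begin_pfn && pfn < end_pfn;
}
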
/arch/m68k/include/asm/
virtconvert.h
20 return __pa(address); in virt_to_phys()
31 __pa(PAGE_OFFSET + (((page) - pg_data_map[0].node_mem_map) << PAGE_SHIFT))
/arch/mips/power/
cpu.c
39 unsigned long nosave_begin_pfn = PFN_DOWN(__pa(&__nosave_begin)); in pfn_is_nosave()
40 unsigned long nosave_end_pfn = PFN_UP(__pa(&__nosave_end)); in pfn_is_nosave()
/arch/x86/xen/
efi.c
39 boot_params.efi_info.efi_systab = (__u32)__pa(efi_systab_xen); in xen_efi_init()
40 boot_params.efi_info.efi_systab_hi = (__u32)(__pa(efi_systab_xen) >> 32); in xen_efi_init()
/arch/s390/kernel/
suspend.c
139 unsigned long nosave_begin_pfn = PFN_DOWN(__pa(&__nosave_begin)); in pfn_is_nosave()
140 unsigned long nosave_end_pfn = PFN_DOWN(__pa(&__nosave_end)); in pfn_is_nosave()
141 unsigned long eshared_pfn = PFN_DOWN(__pa(&_eshared)) - 1; in pfn_is_nosave()
142 unsigned long stext_pfn = PFN_DOWN(__pa(&_stext)); in pfn_is_nosave()
/arch/powerpc/platforms/powernv/
pci-p5ioc2.c
116 pr_devel(" TCE AT : 0x%016lx\n", __pa(tce_mem)); in pnv_pci_init_p5ioc2_phb()
119 rc = opal_pci_set_phb_tce_memory(phb_id, __pa(tce_mem), tce_size); in pnv_pci_init_p5ioc2_phb()
204 __pa(MAX_DMA_ADDRESS)); in pnv_pci_init_p5ioc2_hub()
210 __pa(tce_mem), __pa(tce_mem) + P5IOC2_TCE_MEMORY - 1); in pnv_pci_init_p5ioc2_hub()
211 rc = opal_pci_set_hub_tce_memory(hub_id, __pa(tce_mem), in pnv_pci_init_p5ioc2_hub()
/arch/parisc/include/asm/
pgalloc.h
37 + (__u32)(__pa((unsigned long)pgd) >> PxD_VALUE_SHIFT)); in pgd_alloc()
61 (__u32)(__pa((unsigned long)pmd) >> PxD_VALUE_SHIFT)); in pgd_populate()
109 + (__u32)(__pa((unsigned long)pte) >> PxD_VALUE_SHIFT)); in pmd_populate_kernel()
113 + (__u32)(__pa((unsigned long)pte) >> PxD_VALUE_SHIFT)); in pmd_populate_kernel()
/arch/arc/include/asm/
page.h
91 #define __pa(vaddr) ((unsigned long)vaddr) macro
95 (mem_map + ((__pa(kaddr) - CONFIG_LINUX_LINK_BASE) >> PAGE_SHIFT))
97 #define virt_addr_valid(kaddr) pfn_valid(__pa(kaddr) >> PAGE_SHIFT)
/arch/m32r/include/asm/
page.h
69 #define __pa(x) ((unsigned long)(x) - PAGE_OFFSET) macro
78 #define virt_to_page(kaddr) pfn_to_page(__pa(kaddr) >> PAGE_SHIFT)
79 #define virt_addr_valid(kaddr) pfn_valid(__pa(kaddr) >> PAGE_SHIFT)
/arch/ia64/hp/sim/
simscsi.c
134 req.addr = __pa(sg_virt(sl)); in simscsi_sg_readwrite()
140 ia64_ssc(stat.fd, 1, __pa(&req), offset, mode); in simscsi_sg_readwrite()
141 ia64_ssc(__pa(&stat), 0, 0, 0, SSC_WAIT_COMPLETION); in simscsi_sg_readwrite()
181 req.addr = __pa(&buf); in simscsi_get_disk_size()
183 ia64_ssc(fd, 1, __pa(&req), ((sectors | bit) - 1)*512, SSC_READ); in simscsi_get_disk_size()
185 ia64_ssc(__pa(&stat), 0, 0, 0, SSC_WAIT_COMPLETION); in simscsi_get_disk_size()
229 desc[target_id] = ia64_ssc(__pa(fname), SSC_READ_ACCESS|SSC_WRITE_ACCESS, in simscsi_queuecommand_lck()
/arch/arm64/mm/
mmu.c
100 __pmd_populate(pmd, __pa(pte), PMD_TYPE_TABLE); in alloc_init_pte()
162 phys_addr_t table = __pa(pte_offset_map(&old_pmd, 0)); in alloc_init_pmd()
223 phys_addr_t table = __pa(pmd_offset(&old_pud, 0)); in alloc_init_pud()
308 unsigned long kernel_x_start = round_down(__pa(_stext), SECTION_SIZE); in __map_memblock()
309 unsigned long kernel_x_end = round_up(__pa(__init_end), SECTION_SIZE); in __map_memblock()
397 unsigned long aligned_start = round_down(__pa(_stext), in fixup_executable()
401 __pa(_stext) - aligned_start, in fixup_executable()
406 unsigned long aligned_end = round_up(__pa(__init_end), in fixup_executable()
408 create_mapping(__pa(__init_end), (unsigned long)__init_end, in fixup_executable()
409 aligned_end - __pa(__init_end), in fixup_executable()
[all …]
