Searched refs:phys (Results 1 – 25 of 394) sorted by relevance


/arch/xtensa/mm/
cache.c
146 unsigned long phys = page_to_phys(page); in flush_dcache_page() local
148 unsigned long alias = !(DCACHE_ALIAS_EQ(temp, phys)); in flush_dcache_page()
162 virt = TLBTEMP_BASE_1 + (phys & DCACHE_ALIAS_MASK); in flush_dcache_page()
163 __flush_invalidate_dcache_page_alias(virt, phys); in flush_dcache_page()
168 __flush_invalidate_dcache_page_alias(virt, phys); in flush_dcache_page()
171 __invalidate_icache_page_alias(virt, phys); in flush_dcache_page()
203 unsigned long phys = page_to_phys(pfn_to_page(pfn)); in local_flush_cache_page() local
207 __flush_invalidate_dcache_page_alias(virt, phys); in local_flush_cache_page()
208 __invalidate_icache_page_alias(virt, phys); in local_flush_cache_page()
233 unsigned long phys = page_to_phys(page); in update_mmu_cache() local
[all …]
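
The flush_dcache_page() hits above apply a standard trick for virtually-indexed, aliasing D-caches: the page is flushed through a temporary mapping whose colour bits match the physical address, so the flush lands on the same cache lines as the user mapping (line 162 above). A minimal compilable sketch of just that alias computation, with hypothetical values for TLBTEMP_BASE_1 and DCACHE_ALIAS_MASK (the real ones come from the xtensa variant headers):

    #include <stdio.h>

    #define TLBTEMP_BASE_1    0xc8000000UL
    #define DCACHE_ALIAS_MASK 0x00006000UL  /* colour bits above PAGE_SHIFT */

    static unsigned long alias_vaddr(unsigned long phys)
    {
            /* Pick a temporary virtual address whose colour bits match
             * the physical page, so the flush touches the aliased lines. */
            return TLBTEMP_BASE_1 + (phys & DCACHE_ALIAS_MASK);
    }

    int main(void)
    {
            unsigned long phys = 0x1234a000UL;

            printf("flush alias for %#lx is %#lx\n", phys, alias_vaddr(phys));
            return 0;
    }
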
/arch/m68k/mm/
sun3kmap.c
29 static inline void do_page_mapin(unsigned long phys, unsigned long virt, in do_page_mapin() argument
35 ptep = pfn_pte(phys >> PAGE_SHIFT, PAGE_KERNEL); in do_page_mapin()
48 static inline void do_pmeg_mapin(unsigned long phys, unsigned long virt, in do_pmeg_mapin() argument
56 do_page_mapin(phys, virt, type); in do_pmeg_mapin()
57 phys += PAGE_SIZE; in do_pmeg_mapin()
63 void __iomem *sun3_ioremap(unsigned long phys, unsigned long size, in sun3_ioremap() argument
74 offset = phys & (PAGE_SIZE-1); in sun3_ioremap()
75 phys &= ~(PAGE_SIZE-1); in sun3_ioremap()
98 do_pmeg_mapin(phys, virt, type, seg_pages); in sun3_ioremap()
101 phys += seg_pages * PAGE_SIZE; in sun3_ioremap()
[all …]
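
sun3_ioremap() shows the usual ioremap prologue visible at lines 74–75: remember the sub-page offset, round phys down to a page boundary, map whole pages, and add the offset back to the returned pointer. A small sketch of that arithmetic, with the actual mapping (do_pmeg_mapin() above) replaced by a hypothetical stub:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SIZE 4096UL

    /* Hypothetical stand-in for the real mapper (do_pmeg_mapin() above). */
    static uintptr_t map_pages(uintptr_t phys, size_t pages)
    {
            (void)pages;
            return 0xfe000000UL + (phys & 0x00ffffffUL); /* fake virtual base */
    }

    static void *sketch_ioremap(uintptr_t phys, size_t size)
    {
            uintptr_t offset = phys & (PAGE_SIZE - 1);   /* sub-page offset */
            size_t pages;

            phys &= ~(PAGE_SIZE - 1);                    /* align downwards */
            pages = (size + offset + PAGE_SIZE - 1) / PAGE_SIZE;
            return (void *)(map_pages(phys, pages) + offset); /* offset back */
    }

    int main(void)
    {
            printf("%p\n", sketch_ioremap(0x00fe0123UL, 0x80));
            return 0;
    }
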
/arch/arm64/kvm/hyp/nvhe/
mm.c
27 unsigned long phys, enum kvm_pgtable_prot prot) in __pkvm_create_mappings() argument
32 err = kvm_pgtable_hyp_map(&pkvm_pgtable, start, size, phys, prot); in __pkvm_create_mappings()
38 unsigned long __pkvm_create_private_mapping(phys_addr_t phys, size_t size, in __pkvm_create_private_mapping() argument
46 size = PAGE_ALIGN(size + offset_in_page(phys)); in __pkvm_create_private_mapping()
57 err = kvm_pgtable_hyp_map(&pkvm_pgtable, addr, size, phys, prot); in __pkvm_create_private_mapping()
63 addr = addr + offset_in_page(phys); in __pkvm_create_private_mapping()
75 phys_addr_t phys; in pkvm_create_mappings() local
83 phys = hyp_virt_to_phys((void *)virt_addr); in pkvm_create_mappings()
84 err = __pkvm_create_mappings(virt_addr, PAGE_SIZE, phys, prot); in pkvm_create_mappings()
92 int hyp_back_vmemmap(phys_addr_t phys, unsigned long size, phys_addr_t back) in hyp_back_vmemmap() argument
[all …]
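
__pkvm_create_private_mapping() (lines 46 and 63 above) grows the requested size by the sub-page offset of phys before page-aligning it, then adds the same offset to the virtual address it hands back, so an unaligned physical buffer is still covered by whole-page mappings. A sketch with PAGE_ALIGN and offset_in_page open-coded; the address values are hypothetical:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SIZE          4096UL
    #define PAGE_MASK          (~(PAGE_SIZE - 1))
    #define PAGE_ALIGN(x)      (((x) + PAGE_SIZE - 1) & PAGE_MASK)
    #define offset_in_page(p)  ((unsigned long)(p) & ~PAGE_MASK)

    int main(void)
    {
            uint64_t phys = 0x80001234ULL;
            size_t size = 0x100;
            /* Map whole pages covering [phys, phys + size)... */
            size_t map_size = PAGE_ALIGN(size + offset_in_page(phys));
            uint64_t addr = 0x0000700000000000ULL; /* hypothetical VA slot */

            /* ...then point the caller at the first mapped byte. */
            addr += offset_in_page(phys);
            printf("map %zu bytes, return va %#llx\n", map_size,
                   (unsigned long long)addr);
            return 0;
    }
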
setup.c
61 static int recreate_hyp_mappings(phys_addr_t phys, unsigned long size, in recreate_hyp_mappings() argument
65 void *start, *end, *virt = hyp_phys_to_virt(phys); in recreate_hyp_mappings()
84 ret = hyp_back_vmemmap(phys, size, hyp_virt_to_phys(vmemmap_base)); in recreate_hyp_mappings()
184 int __pkvm_init(phys_addr_t phys, unsigned long size, unsigned long nr_cpus, in __pkvm_init() argument
188 void *virt = hyp_phys_to_virt(phys); in __pkvm_init()
192 if (!PAGE_ALIGNED(phys) || !PAGE_ALIGNED(size)) in __pkvm_init()
202 ret = recreate_hyp_mappings(phys, size, per_cpu_base, hyp_va_bits); in __pkvm_init()
/arch/arm64/mm/
dma-mapping-noalias.c
109 static phys_addr_t __arm64_noalias_map(struct device *dev, phys_addr_t phys, in __arm64_noalias_map() argument
113 bounce = bounce || (phys | size) & ~PAGE_MASK; in __arm64_noalias_map()
115 phys = swiotlb_tbl_map_single(dev, phys, size, PAGE_ALIGN(size), in __arm64_noalias_map()
117 if (phys == DMA_MAPPING_ERROR) in __arm64_noalias_map()
120 if (set_nc(phys_to_virt(phys & PAGE_MASK), PAGE_ALIGN(size))) in __arm64_noalias_map()
123 return phys; in __arm64_noalias_map()
126 swiotlb_tbl_unmap_single(dev, phys, size, PAGE_ALIGN(size), dir, in __arm64_noalias_map()
132 static void __arm64_noalias_unmap(struct device *dev, phys_addr_t phys, size_t size, in __arm64_noalias_unmap() argument
135 clear_nc(phys_to_virt(phys & PAGE_MASK), PAGE_ALIGN(size)); in __arm64_noalias_unmap()
136 if (is_swiotlb_buffer(phys)) in __arm64_noalias_unmap()
[all …]
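
In __arm64_noalias_map() (line 113), `(phys | size) & ~PAGE_MASK` is a compact way to test whether either the start address or the length has sub-page bits set; if so, the buffer is bounced through swiotlb so whole pages can be remapped non-cacheable. A sketch of the predicate:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SIZE 4096UL
    #define PAGE_MASK (~(PAGE_SIZE - 1))

    /* True when phys or size has bits below the page boundary set,
     * i.e. the region does not cover whole pages only. */
    static bool needs_bounce(uint64_t phys, uint64_t size)
    {
            return ((phys | size) & ~PAGE_MASK) != 0;
    }

    int main(void)
    {
            printf("%d %d\n",
                   needs_bounce(0x1000, 0x2000),  /* 0: page-aligned     */
                   needs_bounce(0x1080, 0x2000)); /* 1: sub-page offset  */
            return 0;
    }
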
mmu.c
95 phys_addr_t phys; in early_pgtable_alloc() local
98 phys = memblock_phys_alloc(PAGE_SIZE, PAGE_SIZE); in early_pgtable_alloc()
99 if (!phys) in early_pgtable_alloc()
107 ptr = pte_set_fixmap(phys); in early_pgtable_alloc()
117 return phys; in early_pgtable_alloc()
155 phys_addr_t phys, pgprot_t prot) in init_pte() argument
163 set_pte(ptep, pfn_pte(__phys_to_pfn(phys), prot)); in init_pte()
172 phys += PAGE_SIZE; in init_pte()
179 unsigned long end, phys_addr_t phys, in alloc_init_cont_pte() argument
203 if ((((addr | next | phys) & ~CONT_PTE_MASK) == 0) && in alloc_init_cont_pte()
[all …]
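
init_pte() (lines 163–172) is the classic leaf-level loop: write one PTE per page via pfn_pte(__phys_to_pfn(phys), prot) and advance phys by PAGE_SIZE in step with the virtual address. A sketch of the loop shape against a plain array standing in for the PTE table; the PTE encoding here is hypothetical:

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)

    typedef uint64_t pte_t;

    /* Hypothetical PTE encoding: pfn in the high bits, prot in the low. */
    static pte_t mk_pte(uint64_t phys, uint64_t prot)
    {
            return ((phys >> PAGE_SHIFT) << PAGE_SHIFT) | prot;
    }

    static void init_pte_sketch(pte_t *ptep, uint64_t addr, uint64_t end,
                                uint64_t phys, uint64_t prot)
    {
            /* One entry per page; phys advances with the virtual cursor. */
            for (; addr < end; addr += PAGE_SIZE, phys += PAGE_SIZE)
                    *ptep++ = mk_pte(phys, prot);
    }

    int main(void)
    {
            pte_t table[4] = { 0 };
            int i;

            init_pte_sketch(table, 0, 4 * PAGE_SIZE, 0x40000000ULL, 0x3);
            for (i = 0; i < 4; i++)
                    printf("pte[%d] = %#llx\n", i,
                           (unsigned long long)table[i]);
            return 0;
    }
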
/arch/um/kernel/
physmem.c
41 void map_memory(unsigned long virt, unsigned long phys, unsigned long len, in map_memory() argument
47 fd = phys_mapping(phys, &offset); in map_memory()
118 int phys_mapping(unsigned long phys, unsigned long long *offset_out) in phys_mapping() argument
122 if (phys < physmem_size) { in phys_mapping()
124 *offset_out = phys; in phys_mapping()
126 else if (phys < __pa(end_iomem)) { in phys_mapping()
130 if ((phys >= region->phys) && in phys_mapping()
131 (phys < region->phys + region->size)) { in phys_mapping()
133 *offset_out = phys - region->phys; in phys_mapping()
139 else if (phys < __pa(end_iomem) + highmem) { in phys_mapping()
[all …]
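
phys_mapping() (lines 118–139) resolves a physical address to a backing file descriptor plus offset: addresses below physmem_size live in the main memory file at offset phys, and iomem addresses are found by walking a region list, with the offset taken relative to region->phys. A sketch of that lookup, with hypothetical stand-ins for UML's globals:

    #include <stdio.h>

    struct iomem_region {
            struct iomem_region *next;
            int fd;
            unsigned long phys;
            unsigned long size;
    };

    /* Hypothetical stand-ins for UML's physmem state. */
    static unsigned long physmem_size = 0x10000000UL;
    static int physmem_fd = 3;
    static struct iomem_region *iomem_regions; /* empty list in this demo */

    static int phys_mapping_sketch(unsigned long phys,
                                   unsigned long long *offset_out)
    {
            struct iomem_region *r;

            if (phys < physmem_size) {
                    *offset_out = phys;        /* main memory file */
                    return physmem_fd;
            }
            for (r = iomem_regions; r != NULL; r = r->next) {
                    if (phys >= r->phys && phys < r->phys + r->size) {
                            *offset_out = phys - r->phys;
                            return r->fd;      /* per-region file  */
                    }
            }
            return -1;                         /* not mapped       */
    }

    int main(void)
    {
            unsigned long long off;
            int fd = phys_mapping_sketch(0x1234UL, &off);

            printf("fd=%d off=%llu\n", fd, off);
            return 0;
    }
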
/arch/arm64/kvm/hyp/include/nvhe/
memory.h
22 #define __hyp_va(phys) ((void *)((phys_addr_t)(phys) - hyp_physvirt_offset)) argument
24 static inline void *hyp_phys_to_virt(phys_addr_t phys) in hyp_phys_to_virt() argument
26 return __hyp_va(phys); in hyp_phys_to_virt()
34 #define hyp_phys_to_pfn(phys) ((phys) >> PAGE_SHIFT) argument
36 #define hyp_phys_to_page(phys) (&hyp_vmemmap[hyp_phys_to_pfn(phys)]) argument
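
The nVHE memory.h helpers above convert between hypervisor virtual and physical addresses with a single constant, hyp_physvirt_offset (line 22). A round-trip sketch; the offset value below is purely illustrative, the real one is computed when the hypervisor is set up:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical value; the real offset is chosen at EL2 setup time. */
    static int64_t hyp_physvirt_offset = -0x0000400000000000LL;

    static void *hyp_phys_to_virt_sketch(uint64_t phys)
    {
            return (void *)(uintptr_t)(phys - (uint64_t)hyp_physvirt_offset);
    }

    static uint64_t hyp_virt_to_phys_sketch(void *virt)
    {
            return (uint64_t)(uintptr_t)virt + (uint64_t)hyp_physvirt_offset;
    }

    int main(void)
    {
            uint64_t phys = 0x80000000ULL;
            void *va = hyp_phys_to_virt_sketch(phys);

            printf("%#llx -> %p -> %#llx\n", (unsigned long long)phys, va,
                   (unsigned long long)hyp_virt_to_phys_sketch(va));
            return 0;
    }
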
mm.h
23 int hyp_back_vmemmap(phys_addr_t phys, unsigned long size, phys_addr_t back);
27 unsigned long phys, enum kvm_pgtable_prot prot);
28 unsigned long __pkvm_create_private_mapping(phys_addr_t phys, size_t size,
31 static inline void hyp_vmemmap_range(phys_addr_t phys, unsigned long size, in hyp_vmemmap_range() argument
35 struct hyp_page *p = hyp_phys_to_page(phys); in hyp_vmemmap_range()
/arch/powerpc/mm/nohash/
fsl_booke.c
60 phys_addr_t phys; member
77 return tlbcam_addrs[b].phys + (va - tlbcam_addrs[b].start); in v_block_mapped()
88 if (pa >= tlbcam_addrs[b].phys in p_block_mapped()
90 +tlbcam_addrs[b].phys) in p_block_mapped()
91 return tlbcam_addrs[b].start+(pa-tlbcam_addrs[b].phys); in p_block_mapped()
103 static void settlbcam(int index, unsigned long virt, phys_addr_t phys, in settlbcam() argument
125 TLBCAM[index].MAS3 = (phys & MAS3_RPN) | MAS3_SX | MAS3_SR; in settlbcam()
128 TLBCAM[index].MAS7 = (u64)phys >> 32; in settlbcam()
138 tlbcam_addrs[index].phys = phys; in settlbcam()
142 phys_addr_t phys) in calc_cam_sz() argument
[all …]
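
fsl_booke.c keeps a table of bolted mappings (tlbcam_addrs[], filled in by settlbcam()) and translates in both directions by linear search: v_block_mapped() returns phys + (va - start) when va falls inside a block, and p_block_mapped() does the inverse (lines 77–91). A sketch of both lookups over a hypothetical one-entry table:

    #include <stdint.h>
    #include <stdio.h>

    struct block_map {
            unsigned long start, limit;   /* virtual range [start, limit) */
            uint64_t phys;                /* physical base                */
    };

    /* Hypothetical table; the kernel fills tlbcam_addrs[] at boot. */
    static struct block_map maps[] = {
            { 0xc0000000UL, 0xc4000000UL, 0x00000000ULL },
    };
    #define NMAPS (sizeof(maps) / sizeof(maps[0]))

    static uint64_t v_block_mapped_sketch(unsigned long va)
    {
            unsigned int b;

            for (b = 0; b < NMAPS; b++)
                    if (va >= maps[b].start && va < maps[b].limit)
                            return maps[b].phys + (va - maps[b].start);
            return 0;
    }

    static unsigned long p_block_mapped_sketch(uint64_t pa)
    {
            unsigned int b;

            for (b = 0; b < NMAPS; b++)
                    if (pa >= maps[b].phys &&
                        pa < maps[b].phys + (maps[b].limit - maps[b].start))
                            return maps[b].start + (pa - maps[b].phys);
            return 0;
    }

    int main(void)
    {
            printf("%#llx %#lx\n",
                   (unsigned long long)v_block_mapped_sketch(0xc0001000UL),
                   p_block_mapped_sketch(0x1000ULL));
            return 0;
    }
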
/arch/powerpc/mm/kasan/
book3s_32.c
15 phys_addr_t phys; in kasan_init_region() local
26 phys = memblock_phys_alloc_range(k_size, k_size, 0, in kasan_init_region()
28 if (!phys) in kasan_init_region()
31 setbat(idx, k_nobat, phys, k_size, PAGE_KERNEL); in kasan_init_region()
38 phys = memblock_phys_alloc_range(k_end - k_nobat, PAGE_SIZE, 0, in kasan_init_region()
40 if (!phys) in kasan_init_region()
52 pte_t pte = pfn_pte(PHYS_PFN(phys + k_cur - k_nobat), PAGE_KERNEL); in kasan_init_region()
/arch/arm64/kernel/
acpi.c
94 void __init __iomem *__acpi_map_table(unsigned long phys, unsigned long size) in __acpi_map_table() argument
99 return early_memremap(phys, size); in __acpi_map_table()
264 void __iomem *acpi_os_ioremap(acpi_physical_address phys, acpi_size size) in acpi_os_ioremap() argument
275 if (phys < md->phys_addr || phys >= end) in acpi_os_ioremap()
278 if (phys + size > end) { in acpi_os_ioremap()
301 if (memblock_is_map_memory(phys) || in acpi_os_ioremap()
302 !memblock_is_region_memory(phys, size)) { in acpi_os_ioremap()
303 pr_warn(FW_BUG "requested region covers kernel memory @ %pa\n", &phys); in acpi_os_ioremap()
336 if (memblock_is_map_memory(phys)) in acpi_os_ioremap()
337 return (void __iomem *)__phys_to_virt(phys); in acpi_os_ioremap()
[all …]
/arch/sh/include/asm/
mmu.h
62 int pmb_bolt_mapping(unsigned long virt, phys_addr_t phys,
64 void __iomem *pmb_remap_caller(phys_addr_t phys, unsigned long size,
71 pmb_bolt_mapping(unsigned long virt, phys_addr_t phys, in pmb_bolt_mapping() argument
78 pmb_remap_caller(phys_addr_t phys, unsigned long size, in pmb_remap_caller() argument
100 pmb_remap(phys_addr_t phys, unsigned long size, pgprot_t prot) in pmb_remap() argument
102 return pmb_remap_caller(phys, size, prot, __builtin_return_address(0)); in pmb_remap()
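
pmb_remap() (line 100) is a thin inline wrapper that passes __builtin_return_address(0) to pmb_remap_caller(), so the PMB layer can attribute each mapping to the call site that created it. A sketch of the idiom, assuming a GCC-compatible compiler; the function names here are hypothetical:

    #include <stdio.h>

    /* The worker takes an explicit caller cookie for diagnostics. */
    static void do_remap_caller(unsigned long phys, unsigned long size,
                                void *caller)
    {
            printf("remap %#lx (%lu bytes) requested from %p\n",
                   phys, size, caller);
    }

    /* Thin inline wrapper: call sites get attributed automatically. */
    static inline void do_remap(unsigned long phys, unsigned long size)
    {
            do_remap_caller(phys, size, __builtin_return_address(0));
    }

    int main(void)
    {
            do_remap(0xfd000000UL, 0x1000UL);
            return 0;
    }
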
/arch/sh/boards/mach-sdk7786/
sram.c
20 unsigned long phys; in fpga_sram_init() local
44 phys = (area << 26) + SZ_64M - SZ_4K; in fpga_sram_init()
50 vaddr = ioremap(phys, SZ_2K); in fpga_sram_init()
58 SZ_2K >> 10, phys, phys + SZ_2K - 1, area); in fpga_sram_init()
/arch/x86/events/intel/
bts.c
148 struct bts_phys *phys = &buf->buf[buf->cur_buf]; in bts_config_buffer() local
149 unsigned long index, thresh = 0, end = phys->size; in bts_config_buffer()
150 struct page *page = phys->page; in bts_config_buffer()
155 if (buf->end < phys->offset + buf_size(page)) in bts_config_buffer()
156 end = buf->end - phys->offset - phys->displacement; in bts_config_buffer()
158 index -= phys->offset + phys->displacement; in bts_config_buffer()
168 ds->bts_buffer_base = (u64)(long)page_address(page) + phys->displacement; in bts_config_buffer()
176 static void bts_buffer_pad_out(struct bts_phys *phys, unsigned long head) in bts_buffer_pad_out() argument
178 unsigned long index = head - phys->offset; in bts_buffer_pad_out()
180 memset(page_address(phys->page) + index, 0, phys->size - index); in bts_buffer_pad_out()
[all …]
/arch/x86/include/asm/
fixmap.h
159 phys_addr_t phys, pgprot_t flags);
163 phys_addr_t phys, pgprot_t flags) in __set_fixmap() argument
165 native_set_fixmap(idx, phys, flags); in __set_fixmap()
191 #define __late_set_fixmap(idx, phys, flags) __set_fixmap(idx, phys, flags) argument
195 phys_addr_t phys, pgprot_t flags);
/arch/um/include/shared/
mem.h
9 extern int phys_mapping(unsigned long phys, unsigned long long *offset_out);
17 static inline void *to_virt(unsigned long phys) in to_virt() argument
19 return((void *) uml_physmem + phys); in to_virt()
mem_user.h
40 unsigned long phys; member
58 extern unsigned long phys_offset(unsigned long phys);
59 extern void map_memory(unsigned long virt, unsigned long phys,
/arch/arm/mach-davinci/include/mach/
uncompress.h
52 static inline void set_uart_info(u32 phys) in set_uart_info() argument
54 uart = (u32 *)phys; in set_uart_info()
57 #define _DEBUG_LL_ENTRY(machine, phys) \ argument
60 set_uart_info(phys); \
/arch/parisc/kernel/
kexec.c
12 unsigned long phys);
76 unsigned long phys); in machine_kexec()
78 unsigned long phys = page_to_phys(image->control_code_page); in machine_kexec() local
82 set_fixmap(FIX_TEXT_KEXEC, phys); in machine_kexec()
105 reloc(image->head & PAGE_MASK, image->start, phys); in machine_kexec()
/arch/um/include/asm/
page.h
47 #define pte_set_val(p, phys, prot) \ argument
48 ({ (p).pte = (phys) | pgprot_val(prot); })
74 #define pte_set_val(p, phys, prot) (p).pte = (phys | pgprot_val(prot)) argument
106 #define __va(phys) to_virt((unsigned long) (phys)) argument
/arch/powerpc/mm/book3s32/
mmu.c
46 phys_addr_t phys; member
57 return bat_addrs[b].phys + (va - bat_addrs[b].start); in v_block_mapped()
68 if (pa >= bat_addrs[b].phys in p_block_mapped()
70 +bat_addrs[b].phys) in p_block_mapped()
71 return bat_addrs[b].start+(pa-bat_addrs[b].phys); in p_block_mapped()
113 static void setibat(int index, unsigned long virt, phys_addr_t phys, in setibat() argument
126 bat[0].batl = BAT_PHYS_ADDR(phys) | wimgxpp; in setibat()
259 void __init setbat(int index, unsigned long virt, phys_addr_t phys, in setbat() argument
271 (unsigned long long)phys); in setbat()
286 bat[1].batl = BAT_PHYS_ADDR(phys) | wimgxpp; in setbat()
[all …]
/arch/powerpc/include/asm/
dma.h
271 static __inline__ void set_dma_addr(unsigned int dmanr, unsigned int phys) in set_dma_addr() argument
274 dma_outb(phys & 0xff, in set_dma_addr()
276 dma_outb((phys >> 8) & 0xff, in set_dma_addr()
279 dma_outb((phys >> 1) & 0xff, in set_dma_addr()
281 dma_outb((phys >> 9) & 0xff, in set_dma_addr()
284 set_dma_page(dmanr, phys >> 16); in set_dma_addr()
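
set_dma_addr() (lines 274–284) programs a legacy ISA-style DMA controller: the address goes out one byte at a time through 8-bit ports (shifted right by one extra bit on 16-bit channels), and bits 16 and up land in a separate page register. A userspace sketch of the byte splitting for an 8-bit channel, with dma_outb() replaced by a logging stub and hypothetical port numbers:

    #include <stdio.h>

    /* Stub standing in for dma_outb(): just show what would be written. */
    static void outb_sketch(unsigned char val, unsigned int port)
    {
            printf("outb %#04x -> port %#x\n", (unsigned int)val, port);
    }

    /* Split a 24-bit physical address for an 8-bit ISA DMA channel:
     * low then high address byte to the address port, bits 16..23 to
     * the page register (port numbers here are hypothetical). */
    static void set_dma_addr_sketch(unsigned int phys)
    {
            outb_sketch(phys & 0xff, 0x02);         /* address low byte  */
            outb_sketch((phys >> 8) & 0xff, 0x02);  /* address high byte */
            outb_sketch((phys >> 16) & 0xff, 0x83); /* page register     */
    }

    int main(void)
    {
            set_dma_addr_sketch(0x123456);
            return 0;
    }
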
smp.h
108 static inline void set_hard_smp_processor_id(int cpu, int phys) in set_hard_smp_processor_id() argument
110 smp_hw_index[cpu] = phys; in set_hard_smp_processor_id()
215 static inline void set_hard_smp_processor_id(int cpu, int phys) in set_hard_smp_processor_id() argument
217 paca_ptrs[cpu]->hw_cpu_id = phys; in set_hard_smp_processor_id()
228 static inline void set_hard_smp_processor_id(int cpu, int phys) in set_hard_smp_processor_id() argument
230 boot_cpuid_phys = phys; in set_hard_smp_processor_id()
/arch/sh/mm/
cache-sh4.c
30 static void __flush_cache_one(unsigned long addr, unsigned long phys,
88 static inline void flush_cache_one(unsigned long start, unsigned long phys) in flush_cache_one() argument
101 __flush_cache_one(start, phys, exec_offset); in flush_cache_one()
208 unsigned long address, pfn, phys; in sh4_flush_cache_page() local
217 phys = pfn << PAGE_SHIFT; in sh4_flush_cache_page()
249 (address & shm_align_mask), phys); in sh4_flush_cache_page()
311 static void __flush_cache_one(unsigned long addr, unsigned long phys, in __flush_cache_one() argument
350 p = phys; in __flush_cache_one()
